1 // Generated by the protocol buffer compiler.  DO NOT EDIT!
2 // source: Master.proto
3 
4 package org.apache.hadoop.hbase.protobuf.generated;
5 
6 public final class MasterProtos {
  // Private constructor: this generated class is a static holder for
  // protobuf message types and is never instantiated.
  private MasterProtos() {}
  /**
   * Registers protobuf extensions declared by this file into {@code registry}.
   * The body is empty because no extensions are declared here; the method is
   * kept for API parity with other generated files.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Read-only accessor interface implemented by {@code AddColumnRequest} and
   * its {@code Builder}: presence checks and getters for the four request
   * fields (table_name, column_families, nonce_group, nonce).
   */
  public interface AddColumnRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // required .ColumnFamilySchema column_families = 2;
    /**
     * <code>required .ColumnFamilySchema column_families = 2;</code>
     */
    boolean hasColumnFamilies();
    /**
     * <code>required .ColumnFamilySchema column_families = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies();
    /**
     * <code>required .ColumnFamilySchema column_families = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder();

    // optional uint64 nonce_group = 3 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 4 [default = 0];
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    long getNonce();
  }
62   /**
63    * Protobuf type {@code AddColumnRequest}
64    */
65   public static final class AddColumnRequest extends
66       com.google.protobuf.GeneratedMessage
67       implements AddColumnRequestOrBuilder {
68     // Use AddColumnRequest.newBuilder() to construct.
    /** Builder-based constructor; copies the builder's unknown-field set. */
    private AddColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    /** No-init constructor used only for the shared default instance. */
    private AddColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default (empty) instance; assigned in a static
    // initializer outside this excerpt.
    private static final AddColumnRequest defaultInstance;
    /** Returns the singleton default (empty) instance of this message. */
    public static AddColumnRequest getDefaultInstance() {
      return defaultInstance;
    }

    public AddColumnRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that are not defined in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from
     * {@code input} until end of message, populating fields and recording
     * presence bits in {@code bitField0_}. Unrecognized fields are preserved
     * in the unknown-field set. Throws InvalidProtocolBufferException on
     * malformed input, attaching the partially-parsed message.
     */
    private AddColumnRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();  // start from field defaults before merging wire data
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of the stream / enclosing message.
              done = true;
              break;
            default: {
              // Unknown tag: preserve it, or stop if it ends a group.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (table_name), length-delimited. If a value was
              // already seen, merge into it rather than overwrite.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (column_families), length-delimited; same merge logic.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = columnFamilies_.toBuilder();
              }
              columnFamilies_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(columnFamilies_);
                columnFamilies_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 24: {
              // Field 3 (nonce_group), varint.
              bitField0_ |= 0x00000004;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 32: {
              // Field 4 (nonce), varint.
              bitField0_ |= 0x00000008;
              nonce_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures, preserving the partial message.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for the {@code AddColumnRequest} type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor;
    }

    /** Wires the reflective field accessors to this class and its Builder. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class);
    }

    // NOTE(review): public static and non-final per the protobuf-2.5 code
    // generator's convention; do not reassign.
    public static com.google.protobuf.Parser<AddColumnRequest> PARSER =
        new com.google.protobuf.AbstractParser<AddColumnRequest>() {
      public AddColumnRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new AddColumnRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<AddColumnRequest> getParserForType() {
      return PARSER;
    }
187 
    // Presence bitmap: bit 0 = table_name, bit 1 = column_families,
    // bit 2 = nonce_group, bit 3 = nonce.
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // required .ColumnFamilySchema column_families = 2;
    public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_;
    /**
     * <code>required .ColumnFamilySchema column_families = 2;</code>
     */
    public boolean hasColumnFamilies() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .ColumnFamilySchema column_families = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
      return columnFamilies_;
    }
    /**
     * <code>required .ColumnFamilySchema column_families = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
      return columnFamilies_;
    }

    // optional uint64 nonce_group = 3 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 3;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 4 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 4;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
264 
    /** Resets every field to its proto-declared default value. */
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
      nonceGroup_ = 0L;
      nonce_ = 0L;
    }
    // Memoized isInitialized() result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
isInitialized()272     public final boolean isInitialized() {
273       byte isInitialized = memoizedIsInitialized;
274       if (isInitialized != -1) return isInitialized == 1;
275 
276       if (!hasTableName()) {
277         memoizedIsInitialized = 0;
278         return false;
279       }
280       if (!hasColumnFamilies()) {
281         memoizedIsInitialized = 0;
282         return false;
283       }
284       if (!getTableName().isInitialized()) {
285         memoizedIsInitialized = 0;
286         return false;
287       }
288       if (!getColumnFamilies().isInitialized()) {
289         memoizedIsInitialized = 0;
290         return false;
291       }
292       memoizedIsInitialized = 1;
293       return true;
294     }
295 
writeTo(com.google.protobuf.CodedOutputStream output)296     public void writeTo(com.google.protobuf.CodedOutputStream output)
297                         throws java.io.IOException {
298       getSerializedSize();
299       if (((bitField0_ & 0x00000001) == 0x00000001)) {
300         output.writeMessage(1, tableName_);
301       }
302       if (((bitField0_ & 0x00000002) == 0x00000002)) {
303         output.writeMessage(2, columnFamilies_);
304       }
305       if (((bitField0_ & 0x00000004) == 0x00000004)) {
306         output.writeUInt64(3, nonceGroup_);
307       }
308       if (((bitField0_ & 0x00000008) == 0x00000008)) {
309         output.writeUInt64(4, nonce_);
310       }
311       getUnknownFields().writeTo(output);
312     }
313 
314     private int memoizedSerializedSize = -1;
getSerializedSize()315     public int getSerializedSize() {
316       int size = memoizedSerializedSize;
317       if (size != -1) return size;
318 
319       size = 0;
320       if (((bitField0_ & 0x00000001) == 0x00000001)) {
321         size += com.google.protobuf.CodedOutputStream
322           .computeMessageSize(1, tableName_);
323       }
324       if (((bitField0_ & 0x00000002) == 0x00000002)) {
325         size += com.google.protobuf.CodedOutputStream
326           .computeMessageSize(2, columnFamilies_);
327       }
328       if (((bitField0_ & 0x00000004) == 0x00000004)) {
329         size += com.google.protobuf.CodedOutputStream
330           .computeUInt64Size(3, nonceGroup_);
331       }
332       if (((bitField0_ & 0x00000008) == 0x00000008)) {
333         size += com.google.protobuf.CodedOutputStream
334           .computeUInt64Size(4, nonce_);
335       }
336       size += getUnknownFields().getSerializedSize();
337       memoizedSerializedSize = size;
338       return size;
339     }
340 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
347 
348     @java.lang.Override
equals(final java.lang.Object obj)349     public boolean equals(final java.lang.Object obj) {
350       if (obj == this) {
351        return true;
352       }
353       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)) {
354         return super.equals(obj);
355       }
356       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) obj;
357 
358       boolean result = true;
359       result = result && (hasTableName() == other.hasTableName());
360       if (hasTableName()) {
361         result = result && getTableName()
362             .equals(other.getTableName());
363       }
364       result = result && (hasColumnFamilies() == other.hasColumnFamilies());
365       if (hasColumnFamilies()) {
366         result = result && getColumnFamilies()
367             .equals(other.getColumnFamilies());
368       }
369       result = result && (hasNonceGroup() == other.hasNonceGroup());
370       if (hasNonceGroup()) {
371         result = result && (getNonceGroup()
372             == other.getNonceGroup());
373       }
374       result = result && (hasNonce() == other.hasNonce());
375       if (hasNonce()) {
376         result = result && (getNonce()
377             == other.getNonce());
378       }
379       result = result &&
380           getUnknownFields().equals(other.getUnknownFields());
381       return result;
382     }
383 
    // Memoized hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /**
     * Hash mixes the descriptor, each present field (tagged with its field
     * number), and the unknown fields; the result is memoized. Consistent
     * with equals(): equal messages produce equal hashes.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasColumnFamilies()) {
        hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER;
        hash = (53 * hash) + getColumnFamilies().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
412 
    // Static parse entry points. All delegate to PARSER; the byte[]/ByteString
    // overloads throw InvalidProtocolBufferException on malformed data, the
    // stream overloads throw IOException. parseDelimitedFrom expects a
    // varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
465 
    /** Returns a fresh, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized with this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }
472 
473     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)474     protected Builder newBuilderForType(
475         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
476       Builder builder = new Builder(parent);
477       return builder;
478     }
479     /**
480      * Protobuf type {@code AddColumnRequest}
481      */
482     public static final class Builder extends
483         com.google.protobuf.GeneratedMessage.Builder<Builder>
484        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequestOrBuilder {
      /** Returns the protobuf descriptor for {@code AddColumnRequest}. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor;
      }

      /** Wires the reflective field accessors to the message and this Builder. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-message sub-builders when the runtime
      // requires field builders to always exist.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
          getColumnFamiliesFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
516 
      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        // Reset table_name via the sub-builder when one exists.
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (columnFamiliesBuilder_ == null) {
          columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
        } else {
          columnFamiliesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
537 
      /** Deep-copies this builder by round-tripping through a partial message. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException if any
       * required field (table_name, column_families) is missing.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
558 
      /**
       * Builds the message without checking required fields, copying field
       * values and translating the builder's presence bits into the
       * message's {@code bitField0_}.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // Take the value from the sub-builder when one is active.
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (columnFamiliesBuilder_ == null) {
          result.columnFamilies_ = columnFamilies_;
        } else {
          result.columnFamilies_ = columnFamiliesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
591 
mergeFrom(com.google.protobuf.Message other)592       public Builder mergeFrom(com.google.protobuf.Message other) {
593         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) {
594           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)other);
595         } else {
596           super.mergeFrom(other);
597           return this;
598         }
599       }
600 
      /**
       * Merges every field that is present in {@code other} into this
       * builder (message fields merge recursively, scalars overwrite),
       * then merges unknown fields. Merging the default instance is a no-op.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasColumnFamilies()) {
          mergeColumnFamilies(other.getColumnFamilies());
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
618 
isInitialized()619       public final boolean isInitialized() {
620         if (!hasTableName()) {
621 
622           return false;
623         }
624         if (!hasColumnFamilies()) {
625 
626           return false;
627         }
628         if (!getTableName().isInitialized()) {
629 
630           return false;
631         }
632         if (!getColumnFamilies().isInitialized()) {
633 
634           return false;
635         }
636         return true;
637       }
638 
      /**
       * Parses a message from {@code input} and merges it into this builder.
       * On parse failure the partially-parsed message (if any) is still
       * merged before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover whatever was parsed so the finally block can merge it.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap for this builder (same bit layout as the message).
      private int bitField0_;

      // required .TableName table_name = 1;
      // Plain value, used until tableNameBuilder_ is created; afterward the
      // sub-builder is the source of truth.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
662       /**
663        * <code>required .TableName table_name = 1;</code>
664        */
hasTableName()665       public boolean hasTableName() {
666         return ((bitField0_ & 0x00000001) == 0x00000001);
667       }
668       /**
669        * <code>required .TableName table_name = 1;</code>
670        */
getTableName()671       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
672         if (tableNameBuilder_ == null) {
673           return tableName_;
674         } else {
675           return tableNameBuilder_.getMessage();
676         }
677       }
678       /**
679        * <code>required .TableName table_name = 1;</code>
680        */
      /** Sets table_name to {@code value}; rejects null. */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      // Sets table_name from a sub-builder, building it immediately.
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      // Merges value into any existing table_name; overwrites when the
      // field is unset or still the default instance.
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
727       /**
728        * <code>required .TableName table_name = 1;</code>
729        */
clearTableName()730       public Builder clearTableName() {
731         if (tableNameBuilder_ == null) {
732           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
733           onChanged();
734         } else {
735           tableNameBuilder_.clear();
736         }
737         bitField0_ = (bitField0_ & ~0x00000001);
738         return this;
739       }
740       /**
741        * <code>required .TableName table_name = 1;</code>
742        */
getTableNameBuilder()743       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
744         bitField0_ |= 0x00000001;
745         onChanged();
746         return getTableNameFieldBuilder().getBuilder();
747       }
748       /**
749        * <code>required .TableName table_name = 1;</code>
750        */
getTableNameOrBuilder()751       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
752         if (tableNameBuilder_ != null) {
753           return tableNameBuilder_.getMessageOrBuilder();
754         } else {
755           return tableName_;
756         }
757       }
758       /**
759        * <code>required .TableName table_name = 1;</code>
760        */
761       private com.google.protobuf.SingleFieldBuilder<
762           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder()763           getTableNameFieldBuilder() {
764         if (tableNameBuilder_ == null) {
765           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
766               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
767                   tableName_,
768                   getParentForChildren(),
769                   isClean());
770           tableName_ = null;
771         }
772         return tableNameBuilder_;
773       }
774 
      // required .ColumnFamilySchema column_families = 2;
      // Storage for column_families: exactly one representation is live at a
      // time. Until getColumnFamiliesFieldBuilder() is first called the value
      // lives in columnFamilies_; afterwards ownership moves to
      // columnFamiliesBuilder_ and columnFamilies_ is set to null.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_;
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * @return whether column_families has been set (presence bit 0x00000002 of bitField0_)
       */
      public boolean hasColumnFamilies() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Reads from whichever representation (plain message or nested builder)
       * currently owns the value.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
        if (columnFamiliesBuilder_ == null) {
          return columnFamilies_;
        } else {
          return columnFamiliesBuilder_.getMessage();
        }
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Replaces the current value and marks the field present.
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
        if (columnFamiliesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          columnFamilies_ = value;
          onChanged();
        } else {
          // Null checking is delegated to the field builder in this branch.
          columnFamiliesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Convenience overload: builds {@code builderForValue} and stores the
       * resulting message, marking the field present.
       */
      public Builder setColumnFamilies(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
        if (columnFamiliesBuilder_ == null) {
          columnFamilies_ = builderForValue.build();
          onChanged();
        } else {
          columnFamiliesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Standard protobuf merge semantics: if a non-default value is already
       * present, merge {@code value} into it field-by-field; otherwise adopt
       * {@code value} as-is.
       */
      public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
        if (columnFamiliesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) {
            columnFamilies_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial();
          } else {
            columnFamilies_ = value;
          }
          onChanged();
        } else {
          columnFamiliesBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Resets the field to its default instance and clears the presence bit.
       */
      public Builder clearColumnFamilies() {
        if (columnFamiliesBuilder_ == null) {
          columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
          onChanged();
        } else {
          columnFamiliesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Returns a mutable builder for the nested message. Forces creation of
       * the field builder and marks the field present.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getColumnFamiliesFieldBuilder().getBuilder();
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Read-only view that avoids forcing creation of the field builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
        if (columnFamiliesBuilder_ != null) {
          return columnFamiliesBuilder_.getMessageOrBuilder();
        } else {
          return columnFamilies_;
        }
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * Lazily creates the SingleFieldBuilder, transferring ownership of the
       * current columnFamilies_ value into it (columnFamilies_ becomes null
       * afterwards).
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
          getColumnFamiliesFieldBuilder() {
        if (columnFamiliesBuilder_ == null) {
          columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>(
                  columnFamilies_,
                  getParentForChildren(),
                  isClean());
          columnFamilies_ = null;
        }
        return columnFamiliesBuilder_;
      }
891 
      // optional uint64 nonce_group = 3 [default = 0];
      // Presence tracked by bit 0x00000004 of bitField0_; defaults to 0.
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       *
       * @return whether nonce_group has been explicitly set
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       *
       * Sets the value and marks the field present.
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000004;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       *
       * Clears the presence bit and restores the declared default (0).
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }
924 
      // optional uint64 nonce = 4 [default = 0];
      // Presence tracked by bit 0x00000008 of bitField0_; defaults to 0.
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       *
       * @return whether nonce has been explicitly set
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       *
       * Sets the value and marks the field present.
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000008;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       *
       * Clears the presence bit and restores the declared default (0).
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000008);
        nonce_ = 0L;
        onChanged();
        return this;
      }
957 
958       // @@protoc_insertion_point(builder_scope:AddColumnRequest)
959     }
960 
    static {
      // Eagerly create and initialize the shared singleton default instance
      // at class-load time; the noInit constructor skips field setup, so
      // initFields() must run afterwards.
      defaultInstance = new AddColumnRequest(true);
      defaultInstance.initFields();
    }
965 
966     // @@protoc_insertion_point(class_scope:AddColumnRequest)
967   }
968 
  /**
   * Accessor interface for {@code AddColumnResponse}. The message declares no
   * fields, so only the base {@code MessageOrBuilder} contract applies.
   */
  public interface AddColumnResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code AddColumnResponse}
   *
   * Field-less response message; any bytes received on the wire are retained
   * only as unknown fields.
   */
  public static final class AddColumnResponse extends
      com.google.protobuf.GeneratedMessage
      implements AddColumnResponseOrBuilder {
    // Use AddColumnResponse.newBuilder() to construct.
    private AddColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton default instance.
    private AddColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in the static initializer below.
    private static final AddColumnResponse defaultInstance;
    public static AddColumnResponse getDefaultInstance() {
      return defaultInstance;
    }

    public AddColumnResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: consumes the stream until end-of-message (tag 0),
    // preserving everything it sees as unknown fields since this message
    // defines no fields of its own.
    private AddColumnResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze what was read even on error, so the partially-parsed message
        // attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class);
    }

    public static com.google.protobuf.Parser<AddColumnResponse> PARSER =
        new com.google.protobuf.AbstractParser<AddColumnResponse>() {
      public AddColumnResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new AddColumnResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<AddColumnResponse> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // Memoized tri-state: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Only unknown fields contribute to the size of this empty message.
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) obj;

      // No declared fields: equality reduces to comparing unknown fields.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code AddColumnResponse}
     *
     * Builder for the field-less AddColumnResponse; only unknown fields can
     * be carried over by merging.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields, so there are no nested builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AddColumnResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if it is not fully initialized
       * (never the case here: there are no required fields).
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance()) return this;
        // Only unknown fields can carry information for this message type.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was successfully parsed before the failure, then
          // rethrow; the finally block merges it in.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:AddColumnResponse)
    }

    static {
      // Eagerly create and initialize the shared singleton default instance.
      defaultInstance = new AddColumnResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:AddColumnResponse)
  }
1306 
  /**
   * Accessor interface for {@code DeleteColumnRequest}: a required table name,
   * the required column name to delete (raw bytes), and optional
   * nonce_group/nonce values (both default 0).
   */
  public interface DeleteColumnRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // required bytes column_name = 2;
    /**
     * <code>required bytes column_name = 2;</code>
     */
    boolean hasColumnName();
    /**
     * <code>required bytes column_name = 2;</code>
     */
    com.google.protobuf.ByteString getColumnName();

    // optional uint64 nonce_group = 3 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 4 [default = 0];
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    long getNonce();
  }
1354   /**
1355    * Protobuf type {@code DeleteColumnRequest}
1356    */
1357   public static final class DeleteColumnRequest extends
1358       com.google.protobuf.GeneratedMessage
1359       implements DeleteColumnRequestOrBuilder {
    // Use DeleteColumnRequest.newBuilder() to construct.
    private DeleteColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton default instance.
    private DeleteColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance (assigned in the class's static initializer,
    // following the pattern of the other generated message types here).
    private static final DeleteColumnRequest defaultInstance;
    public static DeleteColumnRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DeleteColumnRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until end-of-stream (tag 0) and
    // fills in fields 1-4, collecting anything unrecognized into unknownFields.
    private DeleteColumnRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // declared by the generator; unused here (no repeated fields in this message)
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // field 1 (table_name), length-delimited message; merge into any
              // previously-seen value, per proto2 last-message-merges semantics
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // field 2 (column_name), length-delimited bytes
              bitField0_ |= 0x00000002;
              columnName_ = input.readBytes();
              break;
            }
            case 24: {
              // field 3 (nonce_group), varint
              bitField0_ |= 0x00000004;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 32: {
              // field 4 (nonce), varint
              bitField0_ |= 0x00000008;
              nonce_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // even on failure, freeze what was read so the unfinished message is usable
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: descriptor and field-accessor table are defined at the
    // MasterProtos outer-class level (outside this chunk).
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class);
    }
1456 
1457     public static com.google.protobuf.Parser<DeleteColumnRequest> PARSER =
1458         new com.google.protobuf.AbstractParser<DeleteColumnRequest>() {
1459       public DeleteColumnRequest parsePartialFrom(
1460           com.google.protobuf.CodedInputStream input,
1461           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1462           throws com.google.protobuf.InvalidProtocolBufferException {
1463         return new DeleteColumnRequest(input, extensionRegistry);
1464       }
1465     };
1466 
1467     @java.lang.Override
getParserForType()1468     public com.google.protobuf.Parser<DeleteColumnRequest> getParserForType() {
1469       return PARSER;
1470     }
1471 
    // presence bitmask: bit 0 = table_name, bit 1 = column_name,
    // bit 2 = nonce_group, bit 3 = nonce
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // required bytes column_name = 2;
    public static final int COLUMN_NAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString columnName_;
    /**
     * <code>required bytes column_name = 2;</code>
     */
    public boolean hasColumnName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes column_name = 2;</code>
     */
    public com.google.protobuf.ByteString getColumnName() {
      return columnName_;
    }

    // optional uint64 nonce_group = 3 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 3;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 4 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 4;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
1542 
    // Resets every field to its proto default (called before parsing).
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      columnName_ = com.google.protobuf.ByteString.EMPTY;
      nonceGroup_ = 0L;
      nonce_ = 0L;
    }
    // memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // both required fields must be present, and the nested required message
      // must itself be fully initialized
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasColumnName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
1569 
    // Serializes only the fields whose presence bit is set, in field-number order.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, columnName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, nonceGroup_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, nonce_);
      }
      getUnknownFields().writeTo(output);
    }

    // -1 sentinel = size not yet computed; safe to memoize since the message is immutable
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, columnName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, nonceGroup_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1614 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization delegates to GeneratedMessage's serialized proxy
      return super.writeReplace();
    }

    // Field-by-field equality: presence flags must match, and set fields must
    // compare equal; unknown fields participate too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasColumnName() == other.hasColumnName());
      if (hasColumnName()) {
        result = result && getColumnName()
            .equals(other.getColumnName());
      }
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1657 
    // 0 doubles as the "not yet computed" sentinel; recomputation is idempotent,
    // so a rare recompute when the hash happens to be 0 is harmless.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // only fields whose presence bit is set contribute, mirroring equals()
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasColumnName()) {
        hash = (37 * hash) + COLUMN_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getColumnName().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1686 
    // Static parse entry points: thin delegations to PARSER for each input kind
    // (ByteString, byte[], InputStream, delimited stream, CodedInputStream),
    // each with and without an extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1739 
    // Builder factories: fresh, copy-from-prototype, and copy-of-this.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DeleteColumnRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-field builders when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf-runtime debugging/testing switch).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
1789 
      // Resets every field to its proto default and clears all presence bits.
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        columnName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      // Deep copy via an intermediate partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnRequest_descriptor;
      }
1815 
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance();
      }

      // Like buildPartial() but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into an immutable message, translating the builder's
      // presence bits into the message's bitField0_; no initialization check.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.columnName_ = columnName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
1856 
      // Dynamic dispatch: use the typed merge when possible, otherwise fall back
      // to reflection-based merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields present in `other` overwrite/merge into this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasColumnName()) {
          setColumnName(other.getColumnName());
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unmemoized required-field check (builder state is mutable, so no caching here).
      public final boolean isInitialized() {
        if (!hasTableName()) {

          return false;
        }
        if (!hasColumnName()) {

          return false;
        }
        if (!getTableName().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from the wire into this builder; on failure, merges whatever was
      // successfully parsed before rethrowing (per Parser's unfinished-message contract).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // builder-side presence bits, mirroring the message's bitField0_ layout
      private int bitField0_;

      // required .TableName table_name = 1;
      // tableName_ holds the value until a nested builder is requested; after
      // getTableNameFieldBuilder() is first called, tableNameBuilder_ owns the state.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          // merge only when a non-default value is already present; otherwise replace
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {
          return tableName_;
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      // Lazily creates the nested-field builder; after this, tableName_ is nulled
      // and all access goes through tableNameBuilder_.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }
2035 
      // required bytes column_name = 2;
      private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes column_name = 2;</code>
       */
      public boolean hasColumnName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes column_name = 2;</code>
       */
      public com.google.protobuf.ByteString getColumnName() {
        return columnName_;
      }
      /**
       * <code>required bytes column_name = 2;</code>
       */
      public Builder setColumnName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        columnName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes column_name = 2;</code>
       */
      public Builder clearColumnName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        // reset to the default instance's value (ByteString.EMPTY)
        columnName_ = getDefaultInstance().getColumnName();
        onChanged();
        return this;
      }
2071 
2072       // optional uint64 nonce_group = 3 [default = 0];
2073       private long nonceGroup_ ;
2074       /**
2075        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2076        */
hasNonceGroup()2077       public boolean hasNonceGroup() {
2078         return ((bitField0_ & 0x00000004) == 0x00000004);
2079       }
2080       /**
2081        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2082        */
getNonceGroup()2083       public long getNonceGroup() {
2084         return nonceGroup_;
2085       }
2086       /**
2087        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2088        */
setNonceGroup(long value)2089       public Builder setNonceGroup(long value) {
2090         bitField0_ |= 0x00000004;
2091         nonceGroup_ = value;
2092         onChanged();
2093         return this;
2094       }
2095       /**
2096        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2097        */
clearNonceGroup()2098       public Builder clearNonceGroup() {
2099         bitField0_ = (bitField0_ & ~0x00000004);
2100         nonceGroup_ = 0L;
2101         onChanged();
2102         return this;
2103       }
2104 
2105       // optional uint64 nonce = 4 [default = 0];
2106       private long nonce_ ;
2107       /**
2108        * <code>optional uint64 nonce = 4 [default = 0];</code>
2109        */
hasNonce()2110       public boolean hasNonce() {
2111         return ((bitField0_ & 0x00000008) == 0x00000008);
2112       }
2113       /**
2114        * <code>optional uint64 nonce = 4 [default = 0];</code>
2115        */
getNonce()2116       public long getNonce() {
2117         return nonce_;
2118       }
2119       /**
2120        * <code>optional uint64 nonce = 4 [default = 0];</code>
2121        */
setNonce(long value)2122       public Builder setNonce(long value) {
2123         bitField0_ |= 0x00000008;
2124         nonce_ = value;
2125         onChanged();
2126         return this;
2127       }
2128       /**
2129        * <code>optional uint64 nonce = 4 [default = 0];</code>
2130        */
clearNonce()2131       public Builder clearNonce() {
2132         bitField0_ = (bitField0_ & ~0x00000008);
2133         nonce_ = 0L;
2134         onChanged();
2135         return this;
2136       }
2137 
2138       // @@protoc_insertion_point(builder_scope:DeleteColumnRequest)
2139     }
2140 
2141     static {
2142       defaultInstance = new DeleteColumnRequest(true);
defaultInstance.initFields()2143       defaultInstance.initFields();
2144     }
2145 
2146     // @@protoc_insertion_point(class_scope:DeleteColumnRequest)
2147   }
2148 
2149   public interface DeleteColumnResponseOrBuilder
2150       extends com.google.protobuf.MessageOrBuilder {
2151   }
2152   /**
2153    * Protobuf type {@code DeleteColumnResponse}
2154    */
2155   public static final class DeleteColumnResponse extends
2156       com.google.protobuf.GeneratedMessage
2157       implements DeleteColumnResponseOrBuilder {
2158     // Use DeleteColumnResponse.newBuilder() to construct.
DeleteColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)2159     private DeleteColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
2160       super(builder);
2161       this.unknownFields = builder.getUnknownFields();
2162     }
DeleteColumnResponse(boolean noInit)2163     private DeleteColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2164 
2165     private static final DeleteColumnResponse defaultInstance;
getDefaultInstance()2166     public static DeleteColumnResponse getDefaultInstance() {
2167       return defaultInstance;
2168     }
2169 
getDefaultInstanceForType()2170     public DeleteColumnResponse getDefaultInstanceForType() {
2171       return defaultInstance;
2172     }
2173 
2174     private final com.google.protobuf.UnknownFieldSet unknownFields;
2175     @java.lang.Override
2176     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()2177         getUnknownFields() {
2178       return this.unknownFields;
2179     }
DeleteColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2180     private DeleteColumnResponse(
2181         com.google.protobuf.CodedInputStream input,
2182         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2183         throws com.google.protobuf.InvalidProtocolBufferException {
2184       initFields();
2185       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2186           com.google.protobuf.UnknownFieldSet.newBuilder();
2187       try {
2188         boolean done = false;
2189         while (!done) {
2190           int tag = input.readTag();
2191           switch (tag) {
2192             case 0:
2193               done = true;
2194               break;
2195             default: {
2196               if (!parseUnknownField(input, unknownFields,
2197                                      extensionRegistry, tag)) {
2198                 done = true;
2199               }
2200               break;
2201             }
2202           }
2203         }
2204       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2205         throw e.setUnfinishedMessage(this);
2206       } catch (java.io.IOException e) {
2207         throw new com.google.protobuf.InvalidProtocolBufferException(
2208             e.getMessage()).setUnfinishedMessage(this);
2209       } finally {
2210         this.unknownFields = unknownFields.build();
2211         makeExtensionsImmutable();
2212       }
2213     }
2214     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()2215         getDescriptor() {
2216       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor;
2217     }
2218 
2219     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()2220         internalGetFieldAccessorTable() {
2221       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_fieldAccessorTable
2222           .ensureFieldAccessorsInitialized(
2223               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class);
2224     }
2225 
2226     public static com.google.protobuf.Parser<DeleteColumnResponse> PARSER =
2227         new com.google.protobuf.AbstractParser<DeleteColumnResponse>() {
2228       public DeleteColumnResponse parsePartialFrom(
2229           com.google.protobuf.CodedInputStream input,
2230           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2231           throws com.google.protobuf.InvalidProtocolBufferException {
2232         return new DeleteColumnResponse(input, extensionRegistry);
2233       }
2234     };
2235 
2236     @java.lang.Override
getParserForType()2237     public com.google.protobuf.Parser<DeleteColumnResponse> getParserForType() {
2238       return PARSER;
2239     }
2240 
initFields()2241     private void initFields() {
2242     }
2243     private byte memoizedIsInitialized = -1;
isInitialized()2244     public final boolean isInitialized() {
2245       byte isInitialized = memoizedIsInitialized;
2246       if (isInitialized != -1) return isInitialized == 1;
2247 
2248       memoizedIsInitialized = 1;
2249       return true;
2250     }
2251 
writeTo(com.google.protobuf.CodedOutputStream output)2252     public void writeTo(com.google.protobuf.CodedOutputStream output)
2253                         throws java.io.IOException {
2254       getSerializedSize();
2255       getUnknownFields().writeTo(output);
2256     }
2257 
2258     private int memoizedSerializedSize = -1;
getSerializedSize()2259     public int getSerializedSize() {
2260       int size = memoizedSerializedSize;
2261       if (size != -1) return size;
2262 
2263       size = 0;
2264       size += getUnknownFields().getSerializedSize();
2265       memoizedSerializedSize = size;
2266       return size;
2267     }
2268 
2269     private static final long serialVersionUID = 0L;
2270     @java.lang.Override
writeReplace()2271     protected java.lang.Object writeReplace()
2272         throws java.io.ObjectStreamException {
2273       return super.writeReplace();
2274     }
2275 
2276     @java.lang.Override
equals(final java.lang.Object obj)2277     public boolean equals(final java.lang.Object obj) {
2278       if (obj == this) {
2279        return true;
2280       }
2281       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse)) {
2282         return super.equals(obj);
2283       }
2284       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) obj;
2285 
2286       boolean result = true;
2287       result = result &&
2288           getUnknownFields().equals(other.getUnknownFields());
2289       return result;
2290     }
2291 
2292     private int memoizedHashCode = 0;
2293     @java.lang.Override
hashCode()2294     public int hashCode() {
2295       if (memoizedHashCode != 0) {
2296         return memoizedHashCode;
2297       }
2298       int hash = 41;
2299       hash = (19 * hash) + getDescriptorForType().hashCode();
2300       hash = (29 * hash) + getUnknownFields().hashCode();
2301       memoizedHashCode = hash;
2302       return hash;
2303     }
2304 
parseFrom( com.google.protobuf.ByteString data)2305     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
2306         com.google.protobuf.ByteString data)
2307         throws com.google.protobuf.InvalidProtocolBufferException {
2308       return PARSER.parseFrom(data);
2309     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2310     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
2311         com.google.protobuf.ByteString data,
2312         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2313         throws com.google.protobuf.InvalidProtocolBufferException {
2314       return PARSER.parseFrom(data, extensionRegistry);
2315     }
parseFrom(byte[] data)2316     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(byte[] data)
2317         throws com.google.protobuf.InvalidProtocolBufferException {
2318       return PARSER.parseFrom(data);
2319     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2320     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
2321         byte[] data,
2322         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2323         throws com.google.protobuf.InvalidProtocolBufferException {
2324       return PARSER.parseFrom(data, extensionRegistry);
2325     }
parseFrom(java.io.InputStream input)2326     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(java.io.InputStream input)
2327         throws java.io.IOException {
2328       return PARSER.parseFrom(input);
2329     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2330     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
2331         java.io.InputStream input,
2332         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2333         throws java.io.IOException {
2334       return PARSER.parseFrom(input, extensionRegistry);
2335     }
parseDelimitedFrom(java.io.InputStream input)2336     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(java.io.InputStream input)
2337         throws java.io.IOException {
2338       return PARSER.parseDelimitedFrom(input);
2339     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2340     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseDelimitedFrom(
2341         java.io.InputStream input,
2342         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2343         throws java.io.IOException {
2344       return PARSER.parseDelimitedFrom(input, extensionRegistry);
2345     }
parseFrom( com.google.protobuf.CodedInputStream input)2346     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
2347         com.google.protobuf.CodedInputStream input)
2348         throws java.io.IOException {
2349       return PARSER.parseFrom(input);
2350     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2351     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parseFrom(
2352         com.google.protobuf.CodedInputStream input,
2353         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2354         throws java.io.IOException {
2355       return PARSER.parseFrom(input, extensionRegistry);
2356     }
2357 
newBuilder()2358     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()2359     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse prototype)2360     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse prototype) {
2361       return newBuilder().mergeFrom(prototype);
2362     }
toBuilder()2363     public Builder toBuilder() { return newBuilder(this); }
2364 
2365     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)2366     protected Builder newBuilderForType(
2367         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2368       Builder builder = new Builder(parent);
2369       return builder;
2370     }
2371     /**
2372      * Protobuf type {@code DeleteColumnResponse}
2373      */
2374     public static final class Builder extends
2375         com.google.protobuf.GeneratedMessage.Builder<Builder>
2376        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponseOrBuilder {
2377       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()2378           getDescriptor() {
2379         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor;
2380       }
2381 
2382       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()2383           internalGetFieldAccessorTable() {
2384         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_fieldAccessorTable
2385             .ensureFieldAccessorsInitialized(
2386                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.Builder.class);
2387       }
2388 
2389       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.newBuilder()
Builder()2390       private Builder() {
2391         maybeForceBuilderInitialization();
2392       }
2393 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)2394       private Builder(
2395           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2396         super(parent);
2397         maybeForceBuilderInitialization();
2398       }
maybeForceBuilderInitialization()2399       private void maybeForceBuilderInitialization() {
2400         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2401         }
2402       }
create()2403       private static Builder create() {
2404         return new Builder();
2405       }
2406 
clear()2407       public Builder clear() {
2408         super.clear();
2409         return this;
2410       }
2411 
clone()2412       public Builder clone() {
2413         return create().mergeFrom(buildPartial());
2414       }
2415 
2416       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()2417           getDescriptorForType() {
2418         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteColumnResponse_descriptor;
2419       }
2420 
getDefaultInstanceForType()2421       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse getDefaultInstanceForType() {
2422         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance();
2423       }
2424 
build()2425       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse build() {
2426         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = buildPartial();
2427         if (!result.isInitialized()) {
2428           throw newUninitializedMessageException(result);
2429         }
2430         return result;
2431       }
2432 
buildPartial()2433       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse buildPartial() {
2434         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse(this);
2435         onBuilt();
2436         return result;
2437       }
2438 
mergeFrom(com.google.protobuf.Message other)2439       public Builder mergeFrom(com.google.protobuf.Message other) {
2440         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) {
2441           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse)other);
2442         } else {
2443           super.mergeFrom(other);
2444           return this;
2445         }
2446       }
2447 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other)2448       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse other) {
2449         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance()) return this;
2450         this.mergeUnknownFields(other.getUnknownFields());
2451         return this;
2452       }
2453 
isInitialized()2454       public final boolean isInitialized() {
2455         return true;
2456       }
2457 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2458       public Builder mergeFrom(
2459           com.google.protobuf.CodedInputStream input,
2460           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2461           throws java.io.IOException {
2462         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse parsedMessage = null;
2463         try {
2464           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2465         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2466           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) e.getUnfinishedMessage();
2467           throw e;
2468         } finally {
2469           if (parsedMessage != null) {
2470             mergeFrom(parsedMessage);
2471           }
2472         }
2473         return this;
2474       }
2475 
2476       // @@protoc_insertion_point(builder_scope:DeleteColumnResponse)
2477     }
2478 
2479     static {
2480       defaultInstance = new DeleteColumnResponse(true);
defaultInstance.initFields()2481       defaultInstance.initFields();
2482     }
2483 
2484     // @@protoc_insertion_point(class_scope:DeleteColumnResponse)
2485   }
2486 
2487   public interface ModifyColumnRequestOrBuilder
2488       extends com.google.protobuf.MessageOrBuilder {
2489 
2490     // required .TableName table_name = 1;
2491     /**
2492      * <code>required .TableName table_name = 1;</code>
2493      */
hasTableName()2494     boolean hasTableName();
2495     /**
2496      * <code>required .TableName table_name = 1;</code>
2497      */
getTableName()2498     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
2499     /**
2500      * <code>required .TableName table_name = 1;</code>
2501      */
getTableNameOrBuilder()2502     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
2503 
2504     // required .ColumnFamilySchema column_families = 2;
2505     /**
2506      * <code>required .ColumnFamilySchema column_families = 2;</code>
2507      */
hasColumnFamilies()2508     boolean hasColumnFamilies();
2509     /**
2510      * <code>required .ColumnFamilySchema column_families = 2;</code>
2511      */
getColumnFamilies()2512     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies();
2513     /**
2514      * <code>required .ColumnFamilySchema column_families = 2;</code>
2515      */
getColumnFamiliesOrBuilder()2516     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder();
2517 
2518     // optional uint64 nonce_group = 3 [default = 0];
2519     /**
2520      * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2521      */
hasNonceGroup()2522     boolean hasNonceGroup();
2523     /**
2524      * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2525      */
getNonceGroup()2526     long getNonceGroup();
2527 
2528     // optional uint64 nonce = 4 [default = 0];
2529     /**
2530      * <code>optional uint64 nonce = 4 [default = 0];</code>
2531      */
hasNonce()2532     boolean hasNonce();
2533     /**
2534      * <code>optional uint64 nonce = 4 [default = 0];</code>
2535      */
getNonce()2536     long getNonce();
2537   }
2538   /**
2539    * Protobuf type {@code ModifyColumnRequest}
2540    */
2541   public static final class ModifyColumnRequest extends
2542       com.google.protobuf.GeneratedMessage
2543       implements ModifyColumnRequestOrBuilder {
2544     // Use ModifyColumnRequest.newBuilder() to construct.
ModifyColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)2545     private ModifyColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
2546       super(builder);
2547       this.unknownFields = builder.getUnknownFields();
2548     }
ModifyColumnRequest(boolean noInit)2549     private ModifyColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2550 
2551     private static final ModifyColumnRequest defaultInstance;
getDefaultInstance()2552     public static ModifyColumnRequest getDefaultInstance() {
2553       return defaultInstance;
2554     }
2555 
getDefaultInstanceForType()2556     public ModifyColumnRequest getDefaultInstanceForType() {
2557       return defaultInstance;
2558     }
2559 
2560     private final com.google.protobuf.UnknownFieldSet unknownFields;
2561     @java.lang.Override
2562     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()2563         getUnknownFields() {
2564       return this.unknownFields;
2565     }
ModifyColumnRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2566     private ModifyColumnRequest(
2567         com.google.protobuf.CodedInputStream input,
2568         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2569         throws com.google.protobuf.InvalidProtocolBufferException {
2570       initFields();
2571       int mutable_bitField0_ = 0;
2572       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2573           com.google.protobuf.UnknownFieldSet.newBuilder();
2574       try {
2575         boolean done = false;
2576         while (!done) {
2577           int tag = input.readTag();
2578           switch (tag) {
2579             case 0:
2580               done = true;
2581               break;
2582             default: {
2583               if (!parseUnknownField(input, unknownFields,
2584                                      extensionRegistry, tag)) {
2585                 done = true;
2586               }
2587               break;
2588             }
2589             case 10: {
2590               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
2591               if (((bitField0_ & 0x00000001) == 0x00000001)) {
2592                 subBuilder = tableName_.toBuilder();
2593               }
2594               tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
2595               if (subBuilder != null) {
2596                 subBuilder.mergeFrom(tableName_);
2597                 tableName_ = subBuilder.buildPartial();
2598               }
2599               bitField0_ |= 0x00000001;
2600               break;
2601             }
2602             case 18: {
2603               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null;
2604               if (((bitField0_ & 0x00000002) == 0x00000002)) {
2605                 subBuilder = columnFamilies_.toBuilder();
2606               }
2607               columnFamilies_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry);
2608               if (subBuilder != null) {
2609                 subBuilder.mergeFrom(columnFamilies_);
2610                 columnFamilies_ = subBuilder.buildPartial();
2611               }
2612               bitField0_ |= 0x00000002;
2613               break;
2614             }
2615             case 24: {
2616               bitField0_ |= 0x00000004;
2617               nonceGroup_ = input.readUInt64();
2618               break;
2619             }
2620             case 32: {
2621               bitField0_ |= 0x00000008;
2622               nonce_ = input.readUInt64();
2623               break;
2624             }
2625           }
2626         }
2627       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2628         throw e.setUnfinishedMessage(this);
2629       } catch (java.io.IOException e) {
2630         throw new com.google.protobuf.InvalidProtocolBufferException(
2631             e.getMessage()).setUnfinishedMessage(this);
2632       } finally {
2633         this.unknownFields = unknownFields.build();
2634         makeExtensionsImmutable();
2635       }
2636     }
2637     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()2638         getDescriptor() {
2639       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor;
2640     }
2641 
2642     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()2643         internalGetFieldAccessorTable() {
2644       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_fieldAccessorTable
2645           .ensureFieldAccessorsInitialized(
2646               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.Builder.class);
2647     }
2648 
2649     public static com.google.protobuf.Parser<ModifyColumnRequest> PARSER =
2650         new com.google.protobuf.AbstractParser<ModifyColumnRequest>() {
2651       public ModifyColumnRequest parsePartialFrom(
2652           com.google.protobuf.CodedInputStream input,
2653           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2654           throws com.google.protobuf.InvalidProtocolBufferException {
2655         return new ModifyColumnRequest(input, extensionRegistry);
2656       }
2657     };
2658 
2659     @java.lang.Override
getParserForType()2660     public com.google.protobuf.Parser<ModifyColumnRequest> getParserForType() {
2661       return PARSER;
2662     }
2663 
2664     private int bitField0_;
2665     // required .TableName table_name = 1;
2666     public static final int TABLE_NAME_FIELD_NUMBER = 1;
2667     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
2668     /**
2669      * <code>required .TableName table_name = 1;</code>
2670      */
hasTableName()2671     public boolean hasTableName() {
2672       return ((bitField0_ & 0x00000001) == 0x00000001);
2673     }
2674     /**
2675      * <code>required .TableName table_name = 1;</code>
2676      */
getTableName()2677     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
2678       return tableName_;
2679     }
2680     /**
2681      * <code>required .TableName table_name = 1;</code>
2682      */
getTableNameOrBuilder()2683     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
2684       return tableName_;
2685     }
2686 
2687     // required .ColumnFamilySchema column_families = 2;
2688     public static final int COLUMN_FAMILIES_FIELD_NUMBER = 2;
2689     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_;
2690     /**
2691      * <code>required .ColumnFamilySchema column_families = 2;</code>
2692      */
hasColumnFamilies()2693     public boolean hasColumnFamilies() {
2694       return ((bitField0_ & 0x00000002) == 0x00000002);
2695     }
2696     /**
2697      * <code>required .ColumnFamilySchema column_families = 2;</code>
2698      */
getColumnFamilies()2699     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
2700       return columnFamilies_;
2701     }
2702     /**
2703      * <code>required .ColumnFamilySchema column_families = 2;</code>
2704      */
getColumnFamiliesOrBuilder()2705     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
2706       return columnFamilies_;
2707     }
2708 
2709     // optional uint64 nonce_group = 3 [default = 0];
2710     public static final int NONCE_GROUP_FIELD_NUMBER = 3;
2711     private long nonceGroup_;
2712     /**
2713      * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2714      */
hasNonceGroup()2715     public boolean hasNonceGroup() {
2716       return ((bitField0_ & 0x00000004) == 0x00000004);
2717     }
2718     /**
2719      * <code>optional uint64 nonce_group = 3 [default = 0];</code>
2720      */
getNonceGroup()2721     public long getNonceGroup() {
2722       return nonceGroup_;
2723     }
2724 
    // optional uint64 nonce = 4 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 4;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     *
     * @return true when nonce was explicitly set (presence bit 0x00000008 of
     *         {@code bitField0_}).
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     *
     * @return the nonce value; 0 when unset.
     */
    public long getNonce() {
      return nonce_;
    }
2740 
    // Resets every field to its proto default (message fields to their shared
    // default instances, uint64s to 0). Invoked by the static initializer on
    // defaultInstance, whose lightweight constructor skips field setup.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
      nonceGroup_ = 0L;
      nonce_ = 0L;
    }
2747     private byte memoizedIsInitialized = -1;
isInitialized()2748     public final boolean isInitialized() {
2749       byte isInitialized = memoizedIsInitialized;
2750       if (isInitialized != -1) return isInitialized == 1;
2751 
2752       if (!hasTableName()) {
2753         memoizedIsInitialized = 0;
2754         return false;
2755       }
2756       if (!hasColumnFamilies()) {
2757         memoizedIsInitialized = 0;
2758         return false;
2759       }
2760       if (!getTableName().isInitialized()) {
2761         memoizedIsInitialized = 0;
2762         return false;
2763       }
2764       if (!getColumnFamilies().isInitialized()) {
2765         memoizedIsInitialized = 0;
2766         return false;
2767       }
2768       memoizedIsInitialized = 1;
2769       return true;
2770     }
2771 
    /**
     * Serializes every present field to {@code output} in ascending
     * field-number order (1..4), then appends any unknown fields.
     * getSerializedSize() is called first for its memoization side effect.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, columnFamilies_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, nonceGroup_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, nonce_);
      }
      getUnknownFields().writeTo(output);
    }
2789 
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and returns the serialized byte size: the sum of each
     * present field's tag+payload size plus the unknown-field set.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, columnFamilies_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, nonceGroup_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2816 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the GeneratedMessage superclass
    // (which substitutes a serializable proxy form).
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2823 
2824     @java.lang.Override
equals(final java.lang.Object obj)2825     public boolean equals(final java.lang.Object obj) {
2826       if (obj == this) {
2827        return true;
2828       }
2829       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)) {
2830         return super.equals(obj);
2831       }
2832       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) obj;
2833 
2834       boolean result = true;
2835       result = result && (hasTableName() == other.hasTableName());
2836       if (hasTableName()) {
2837         result = result && getTableName()
2838             .equals(other.getTableName());
2839       }
2840       result = result && (hasColumnFamilies() == other.hasColumnFamilies());
2841       if (hasColumnFamilies()) {
2842         result = result && getColumnFamilies()
2843             .equals(other.getColumnFamilies());
2844       }
2845       result = result && (hasNonceGroup() == other.hasNonceGroup());
2846       if (hasNonceGroup()) {
2847         result = result && (getNonceGroup()
2848             == other.getNonceGroup());
2849       }
2850       result = result && (hasNonce() == other.hasNonce());
2851       if (hasNonce()) {
2852         result = result && (getNonce()
2853             == other.getNonce());
2854       }
2855       result = result &&
2856           getUnknownFields().equals(other.getUnknownFields());
2857       return result;
2858     }
2859 
    // Cached hash; 0 doubles as the "not yet computed" sentinel, so a message
    // whose true hash is 0 is simply re-hashed on every call.
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes the descriptor, then each present
     * field's number and value hash, then the unknown-field set.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasColumnFamilies()) {
        hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER;
        hash = (53 * hash) + getColumnFamilies().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        // hashLong folds the 64-bit value into an int (helper defined
        // elsewhere in this file).
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2888 
    // Static parse entry points: all overloads delegate to the shared PARSER.
    // ByteString/byte[] variants throw InvalidProtocolBufferException on
    // malformed input; stream variants surface IOException as well. The
    // "Delimited" forms read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2941 
    // Builder factory helpers: fresh builder, builder seeded from an existing
    // message (newBuilder(prototype)), and this-instance-to-builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
2948 
2949     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)2950     protected Builder newBuilderForType(
2951         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2952       Builder builder = new Builder(parent);
2953       return builder;
2954     }
    /**
     * Protobuf type {@code ModifyColumnRequest}
     *
     * <p>Mutable builder companion to the immutable message. Field presence is
     * tracked in {@code bitField0_}: bit 0x01 = table_name, 0x02 =
     * column_families, 0x04 = nonce_group, 0x08 = nonce. Nested-message
     * fields use a lazy {@code SingleFieldBuilder}: until one is created the
     * plain field holds the value; afterwards the builder owns it and the
     * field is nulled out.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builders when the protobuf
      // runtime is configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
          getColumnFamiliesFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all four fields to proto defaults and clears every presence bit.
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (columnFamiliesBuilder_ == null) {
          columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
        } else {
          columnFamiliesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance();
      }

      // Like buildPartial() but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into a new message, translating the
      // builder's presence bits into the message's bitField0_ one by one.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (columnFamiliesBuilder_ == null) {
          result.columnFamilies_ = columnFamilies_;
        } else {
          result.columnFamilies_ = columnFamiliesBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields that are set on 'other'; message fields are
      // merged recursively, scalars are overwritten.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasColumnFamilies()) {
          mergeColumnFamilies(other.getColumnFamilies());
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unmemoized variant of the message's isInitialized(): both required
      // fields must be present and themselves initialized.
      public final boolean isInitialized() {
        if (!hasTableName()) {

          return false;
        }
        if (!hasColumnFamilies()) {

          return false;
        }
        if (!getTableName().isInitialized()) {

          return false;
        }
        if (!getColumnFamilies().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from the stream and merges the result into this builder; on a
      // parse failure the partially-read message is still merged (in the
      // finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the four fields (see class comment for the layout).
      private int bitField0_;

      // required .TableName table_name = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       *
       * <p>Merges into the existing value when one is already set (and is not
       * the shared default); otherwise replaces it outright.
       */
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       *
       * <p>Marks the field present and exposes its nested builder for
       * in-place mutation.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {
          return tableName_;
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       *
       * <p>Lazily creates the SingleFieldBuilder, handing it the current
       * value; ownership moves to the builder so the field is nulled.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }

      // required .ColumnFamilySchema column_families = 2;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_;
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       */
      public boolean hasColumnFamilies() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies() {
        if (columnFamiliesBuilder_ == null) {
          return columnFamilies_;
        } else {
          return columnFamiliesBuilder_.getMessage();
        }
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       */
      public Builder setColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
        if (columnFamiliesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          columnFamilies_ = value;
          onChanged();
        } else {
          columnFamiliesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       */
      public Builder setColumnFamilies(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
        if (columnFamiliesBuilder_ == null) {
          columnFamilies_ = builderForValue.build();
          onChanged();
        } else {
          columnFamiliesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * <p>Merges into the existing value when one is already set (and is not
       * the shared default); otherwise replaces it outright.
       */
      public Builder mergeColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
        if (columnFamiliesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              columnFamilies_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) {
            columnFamilies_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnFamilies_).mergeFrom(value).buildPartial();
          } else {
            columnFamilies_ = value;
          }
          onChanged();
        } else {
          columnFamiliesBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       */
      public Builder clearColumnFamilies() {
        if (columnFamiliesBuilder_ == null) {
          columnFamilies_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
          onChanged();
        } else {
          columnFamiliesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * <p>Marks the field present and exposes its nested builder for
       * in-place mutation.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getColumnFamiliesFieldBuilder().getBuilder();
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder() {
        if (columnFamiliesBuilder_ != null) {
          return columnFamiliesBuilder_.getMessageOrBuilder();
        } else {
          return columnFamilies_;
        }
      }
      /**
       * <code>required .ColumnFamilySchema column_families = 2;</code>
       *
       * <p>Lazily creates the SingleFieldBuilder, handing it the current
       * value; ownership moves to the builder so the field is nulled.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
          getColumnFamiliesFieldBuilder() {
        if (columnFamiliesBuilder_ == null) {
          columnFamiliesBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>(
                  columnFamilies_,
                  getParentForChildren(),
                  isClean());
          columnFamilies_ = null;
        }
        return columnFamiliesBuilder_;
      }

      // optional uint64 nonce_group = 3 [default = 0];
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000004;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 nonce = 4 [default = 0];
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000008;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000008);
        nonce_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ModifyColumnRequest)
    }
3436 
3437     static {
3438       defaultInstance = new ModifyColumnRequest(true);
defaultInstance.initFields()3439       defaultInstance.initFields();
3440     }
3441 
3442     // @@protoc_insertion_point(class_scope:ModifyColumnRequest)
3443   }
3444 
3445   public interface ModifyColumnResponseOrBuilder
3446       extends com.google.protobuf.MessageOrBuilder {
3447   }
3448   /**
3449    * Protobuf type {@code ModifyColumnResponse}
3450    */
3451   public static final class ModifyColumnResponse extends
3452       com.google.protobuf.GeneratedMessage
3453       implements ModifyColumnResponseOrBuilder {
3454     // Use ModifyColumnResponse.newBuilder() to construct.
ModifyColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)3455     private ModifyColumnResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3456       super(builder);
3457       this.unknownFields = builder.getUnknownFields();
3458     }
ModifyColumnResponse(boolean noInit)3459     private ModifyColumnResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3460 
3461     private static final ModifyColumnResponse defaultInstance;
getDefaultInstance()3462     public static ModifyColumnResponse getDefaultInstance() {
3463       return defaultInstance;
3464     }
3465 
getDefaultInstanceForType()3466     public ModifyColumnResponse getDefaultInstanceForType() {
3467       return defaultInstance;
3468     }
3469 
3470     private final com.google.protobuf.UnknownFieldSet unknownFields;
3471     @java.lang.Override
3472     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()3473         getUnknownFields() {
3474       return this.unknownFields;
3475     }
ModifyColumnResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3476     private ModifyColumnResponse(
3477         com.google.protobuf.CodedInputStream input,
3478         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3479         throws com.google.protobuf.InvalidProtocolBufferException {
3480       initFields();
3481       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3482           com.google.protobuf.UnknownFieldSet.newBuilder();
3483       try {
3484         boolean done = false;
3485         while (!done) {
3486           int tag = input.readTag();
3487           switch (tag) {
3488             case 0:
3489               done = true;
3490               break;
3491             default: {
3492               if (!parseUnknownField(input, unknownFields,
3493                                      extensionRegistry, tag)) {
3494                 done = true;
3495               }
3496               break;
3497             }
3498           }
3499         }
3500       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3501         throw e.setUnfinishedMessage(this);
3502       } catch (java.io.IOException e) {
3503         throw new com.google.protobuf.InvalidProtocolBufferException(
3504             e.getMessage()).setUnfinishedMessage(this);
3505       } finally {
3506         this.unknownFields = unknownFields.build();
3507         makeExtensionsImmutable();
3508       }
3509     }
3510     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()3511         getDescriptor() {
3512       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor;
3513     }
3514 
3515     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()3516         internalGetFieldAccessorTable() {
3517       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_fieldAccessorTable
3518           .ensureFieldAccessorsInitialized(
3519               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.Builder.class);
3520     }
3521 
3522     public static com.google.protobuf.Parser<ModifyColumnResponse> PARSER =
3523         new com.google.protobuf.AbstractParser<ModifyColumnResponse>() {
3524       public ModifyColumnResponse parsePartialFrom(
3525           com.google.protobuf.CodedInputStream input,
3526           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3527           throws com.google.protobuf.InvalidProtocolBufferException {
3528         return new ModifyColumnResponse(input, extensionRegistry);
3529       }
3530     };
3531 
3532     @java.lang.Override
getParserForType()3533     public com.google.protobuf.Parser<ModifyColumnResponse> getParserForType() {
3534       return PARSER;
3535     }
3536 
initFields()3537     private void initFields() {
3538     }
3539     private byte memoizedIsInitialized = -1;
isInitialized()3540     public final boolean isInitialized() {
3541       byte isInitialized = memoizedIsInitialized;
3542       if (isInitialized != -1) return isInitialized == 1;
3543 
3544       memoizedIsInitialized = 1;
3545       return true;
3546     }
3547 
writeTo(com.google.protobuf.CodedOutputStream output)3548     public void writeTo(com.google.protobuf.CodedOutputStream output)
3549                         throws java.io.IOException {
3550       getSerializedSize();
3551       getUnknownFields().writeTo(output);
3552     }
3553 
3554     private int memoizedSerializedSize = -1;
getSerializedSize()3555     public int getSerializedSize() {
3556       int size = memoizedSerializedSize;
3557       if (size != -1) return size;
3558 
3559       size = 0;
3560       size += getUnknownFields().getSerializedSize();
3561       memoizedSerializedSize = size;
3562       return size;
3563     }
3564 
3565     private static final long serialVersionUID = 0L;
3566     @java.lang.Override
writeReplace()3567     protected java.lang.Object writeReplace()
3568         throws java.io.ObjectStreamException {
3569       return super.writeReplace();
3570     }
3571 
3572     @java.lang.Override
equals(final java.lang.Object obj)3573     public boolean equals(final java.lang.Object obj) {
3574       if (obj == this) {
3575        return true;
3576       }
3577       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse)) {
3578         return super.equals(obj);
3579       }
3580       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) obj;
3581 
3582       boolean result = true;
3583       result = result &&
3584           getUnknownFields().equals(other.getUnknownFields());
3585       return result;
3586     }
3587 
3588     private int memoizedHashCode = 0;
3589     @java.lang.Override
hashCode()3590     public int hashCode() {
3591       if (memoizedHashCode != 0) {
3592         return memoizedHashCode;
3593       }
3594       int hash = 41;
3595       hash = (19 * hash) + getDescriptorForType().hashCode();
3596       hash = (29 * hash) + getUnknownFields().hashCode();
3597       memoizedHashCode = hash;
3598       return hash;
3599     }
3600 
parseFrom( com.google.protobuf.ByteString data)3601     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(
3602         com.google.protobuf.ByteString data)
3603         throws com.google.protobuf.InvalidProtocolBufferException {
3604       return PARSER.parseFrom(data);
3605     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3606     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(
3607         com.google.protobuf.ByteString data,
3608         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3609         throws com.google.protobuf.InvalidProtocolBufferException {
3610       return PARSER.parseFrom(data, extensionRegistry);
3611     }
parseFrom(byte[] data)3612     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(byte[] data)
3613         throws com.google.protobuf.InvalidProtocolBufferException {
3614       return PARSER.parseFrom(data);
3615     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3616     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(
3617         byte[] data,
3618         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3619         throws com.google.protobuf.InvalidProtocolBufferException {
3620       return PARSER.parseFrom(data, extensionRegistry);
3621     }
parseFrom(java.io.InputStream input)3622     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(java.io.InputStream input)
3623         throws java.io.IOException {
3624       return PARSER.parseFrom(input);
3625     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3626     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(
3627         java.io.InputStream input,
3628         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3629         throws java.io.IOException {
3630       return PARSER.parseFrom(input, extensionRegistry);
3631     }
parseDelimitedFrom(java.io.InputStream input)3632     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom(java.io.InputStream input)
3633         throws java.io.IOException {
3634       return PARSER.parseDelimitedFrom(input);
3635     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3636     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseDelimitedFrom(
3637         java.io.InputStream input,
3638         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3639         throws java.io.IOException {
3640       return PARSER.parseDelimitedFrom(input, extensionRegistry);
3641     }
parseFrom( com.google.protobuf.CodedInputStream input)3642     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(
3643         com.google.protobuf.CodedInputStream input)
3644         throws java.io.IOException {
3645       return PARSER.parseFrom(input);
3646     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3647     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parseFrom(
3648         com.google.protobuf.CodedInputStream input,
3649         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3650         throws java.io.IOException {
3651       return PARSER.parseFrom(input, extensionRegistry);
3652     }
3653 
newBuilder()3654     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()3655     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse prototype)3656     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse prototype) {
3657       return newBuilder().mergeFrom(prototype);
3658     }
toBuilder()3659     public Builder toBuilder() { return newBuilder(this); }
3660 
3661     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)3662     protected Builder newBuilderForType(
3663         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3664       Builder builder = new Builder(parent);
3665       return builder;
3666     }
3667     /**
3668      * Protobuf type {@code ModifyColumnResponse}
3669      */
3670     public static final class Builder extends
3671         com.google.protobuf.GeneratedMessage.Builder<Builder>
3672        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponseOrBuilder {
3673       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()3674           getDescriptor() {
3675         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor;
3676       }
3677 
3678       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()3679           internalGetFieldAccessorTable() {
3680         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_fieldAccessorTable
3681             .ensureFieldAccessorsInitialized(
3682                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.Builder.class);
3683       }
3684 
3685       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.newBuilder()
Builder()3686       private Builder() {
3687         maybeForceBuilderInitialization();
3688       }
3689 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)3690       private Builder(
3691           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3692         super(parent);
3693         maybeForceBuilderInitialization();
3694       }
maybeForceBuilderInitialization()3695       private void maybeForceBuilderInitialization() {
3696         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3697         }
3698       }
create()3699       private static Builder create() {
3700         return new Builder();
3701       }
3702 
clear()3703       public Builder clear() {
3704         super.clear();
3705         return this;
3706       }
3707 
clone()3708       public Builder clone() {
3709         return create().mergeFrom(buildPartial());
3710       }
3711 
3712       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()3713           getDescriptorForType() {
3714         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyColumnResponse_descriptor;
3715       }
3716 
getDefaultInstanceForType()3717       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse getDefaultInstanceForType() {
3718         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance();
3719       }
3720 
build()3721       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse build() {
3722         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = buildPartial();
3723         if (!result.isInitialized()) {
3724           throw newUninitializedMessageException(result);
3725         }
3726         return result;
3727       }
3728 
buildPartial()3729       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse buildPartial() {
3730         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse(this);
3731         onBuilt();
3732         return result;
3733       }
3734 
mergeFrom(com.google.protobuf.Message other)3735       public Builder mergeFrom(com.google.protobuf.Message other) {
3736         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) {
3737           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse)other);
3738         } else {
3739           super.mergeFrom(other);
3740           return this;
3741         }
3742       }
3743 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other)3744       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse other) {
3745         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance()) return this;
3746         this.mergeUnknownFields(other.getUnknownFields());
3747         return this;
3748       }
3749 
isInitialized()3750       public final boolean isInitialized() {
3751         return true;
3752       }
3753 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3754       public Builder mergeFrom(
3755           com.google.protobuf.CodedInputStream input,
3756           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3757           throws java.io.IOException {
3758         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse parsedMessage = null;
3759         try {
3760           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3761         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3762           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) e.getUnfinishedMessage();
3763           throw e;
3764         } finally {
3765           if (parsedMessage != null) {
3766             mergeFrom(parsedMessage);
3767           }
3768         }
3769         return this;
3770       }
3771 
3772       // @@protoc_insertion_point(builder_scope:ModifyColumnResponse)
3773     }
3774 
3775     static {
3776       defaultInstance = new ModifyColumnResponse(true);
defaultInstance.initFields()3777       defaultInstance.initFields();
3778     }
3779 
3780     // @@protoc_insertion_point(class_scope:ModifyColumnResponse)
3781   }
3782 
3783   public interface MoveRegionRequestOrBuilder
3784       extends com.google.protobuf.MessageOrBuilder {
3785 
3786     // required .RegionSpecifier region = 1;
3787     /**
3788      * <code>required .RegionSpecifier region = 1;</code>
3789      */
hasRegion()3790     boolean hasRegion();
3791     /**
3792      * <code>required .RegionSpecifier region = 1;</code>
3793      */
getRegion()3794     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
3795     /**
3796      * <code>required .RegionSpecifier region = 1;</code>
3797      */
getRegionOrBuilder()3798     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
3799 
3800     // optional .ServerName dest_server_name = 2;
3801     /**
3802      * <code>optional .ServerName dest_server_name = 2;</code>
3803      */
hasDestServerName()3804     boolean hasDestServerName();
3805     /**
3806      * <code>optional .ServerName dest_server_name = 2;</code>
3807      */
getDestServerName()3808     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName();
3809     /**
3810      * <code>optional .ServerName dest_server_name = 2;</code>
3811      */
getDestServerNameOrBuilder()3812     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder();
3813   }
3814   /**
3815    * Protobuf type {@code MoveRegionRequest}
3816    */
3817   public static final class MoveRegionRequest extends
3818       com.google.protobuf.GeneratedMessage
3819       implements MoveRegionRequestOrBuilder {
3820     // Use MoveRegionRequest.newBuilder() to construct.
MoveRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)3821     private MoveRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
3822       super(builder);
3823       this.unknownFields = builder.getUnknownFields();
3824     }
MoveRegionRequest(boolean noInit)3825     private MoveRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3826 
3827     private static final MoveRegionRequest defaultInstance;
getDefaultInstance()3828     public static MoveRegionRequest getDefaultInstance() {
3829       return defaultInstance;
3830     }
3831 
getDefaultInstanceForType()3832     public MoveRegionRequest getDefaultInstanceForType() {
3833       return defaultInstance;
3834     }
3835 
3836     private final com.google.protobuf.UnknownFieldSet unknownFields;
3837     @java.lang.Override
3838     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()3839         getUnknownFields() {
3840       return this.unknownFields;
3841     }
MoveRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3842     private MoveRegionRequest(
3843         com.google.protobuf.CodedInputStream input,
3844         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3845         throws com.google.protobuf.InvalidProtocolBufferException {
3846       initFields();
3847       int mutable_bitField0_ = 0;
3848       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3849           com.google.protobuf.UnknownFieldSet.newBuilder();
3850       try {
3851         boolean done = false;
3852         while (!done) {
3853           int tag = input.readTag();
3854           switch (tag) {
3855             case 0:
3856               done = true;
3857               break;
3858             default: {
3859               if (!parseUnknownField(input, unknownFields,
3860                                      extensionRegistry, tag)) {
3861                 done = true;
3862               }
3863               break;
3864             }
3865             case 10: {
3866               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
3867               if (((bitField0_ & 0x00000001) == 0x00000001)) {
3868                 subBuilder = region_.toBuilder();
3869               }
3870               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
3871               if (subBuilder != null) {
3872                 subBuilder.mergeFrom(region_);
3873                 region_ = subBuilder.buildPartial();
3874               }
3875               bitField0_ |= 0x00000001;
3876               break;
3877             }
3878             case 18: {
3879               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
3880               if (((bitField0_ & 0x00000002) == 0x00000002)) {
3881                 subBuilder = destServerName_.toBuilder();
3882               }
3883               destServerName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
3884               if (subBuilder != null) {
3885                 subBuilder.mergeFrom(destServerName_);
3886                 destServerName_ = subBuilder.buildPartial();
3887               }
3888               bitField0_ |= 0x00000002;
3889               break;
3890             }
3891           }
3892         }
3893       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3894         throw e.setUnfinishedMessage(this);
3895       } catch (java.io.IOException e) {
3896         throw new com.google.protobuf.InvalidProtocolBufferException(
3897             e.getMessage()).setUnfinishedMessage(this);
3898       } finally {
3899         this.unknownFields = unknownFields.build();
3900         makeExtensionsImmutable();
3901       }
3902     }
3903     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()3904         getDescriptor() {
3905       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_descriptor;
3906     }
3907 
3908     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()3909         internalGetFieldAccessorTable() {
3910       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_fieldAccessorTable
3911           .ensureFieldAccessorsInitialized(
3912               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.Builder.class);
3913     }
3914 
3915     public static com.google.protobuf.Parser<MoveRegionRequest> PARSER =
3916         new com.google.protobuf.AbstractParser<MoveRegionRequest>() {
3917       public MoveRegionRequest parsePartialFrom(
3918           com.google.protobuf.CodedInputStream input,
3919           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3920           throws com.google.protobuf.InvalidProtocolBufferException {
3921         return new MoveRegionRequest(input, extensionRegistry);
3922       }
3923     };
3924 
3925     @java.lang.Override
getParserForType()3926     public com.google.protobuf.Parser<MoveRegionRequest> getParserForType() {
3927       return PARSER;
3928     }
3929 
3930     private int bitField0_;
3931     // required .RegionSpecifier region = 1;
3932     public static final int REGION_FIELD_NUMBER = 1;
3933     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
3934     /**
3935      * <code>required .RegionSpecifier region = 1;</code>
3936      */
hasRegion()3937     public boolean hasRegion() {
3938       return ((bitField0_ & 0x00000001) == 0x00000001);
3939     }
3940     /**
3941      * <code>required .RegionSpecifier region = 1;</code>
3942      */
getRegion()3943     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
3944       return region_;
3945     }
3946     /**
3947      * <code>required .RegionSpecifier region = 1;</code>
3948      */
getRegionOrBuilder()3949     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
3950       return region_;
3951     }
3952 
3953     // optional .ServerName dest_server_name = 2;
3954     public static final int DEST_SERVER_NAME_FIELD_NUMBER = 2;
3955     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_;
3956     /**
3957      * <code>optional .ServerName dest_server_name = 2;</code>
3958      */
hasDestServerName()3959     public boolean hasDestServerName() {
3960       return ((bitField0_ & 0x00000002) == 0x00000002);
3961     }
3962     /**
3963      * <code>optional .ServerName dest_server_name = 2;</code>
3964      */
getDestServerName()3965     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() {
3966       return destServerName_;
3967     }
3968     /**
3969      * <code>optional .ServerName dest_server_name = 2;</code>
3970      */
getDestServerNameOrBuilder()3971     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() {
3972       return destServerName_;
3973     }
3974 
initFields()3975     private void initFields() {
3976       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
3977       destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
3978     }
3979     private byte memoizedIsInitialized = -1;
isInitialized()3980     public final boolean isInitialized() {
3981       byte isInitialized = memoizedIsInitialized;
3982       if (isInitialized != -1) return isInitialized == 1;
3983 
3984       if (!hasRegion()) {
3985         memoizedIsInitialized = 0;
3986         return false;
3987       }
3988       if (!getRegion().isInitialized()) {
3989         memoizedIsInitialized = 0;
3990         return false;
3991       }
3992       if (hasDestServerName()) {
3993         if (!getDestServerName().isInitialized()) {
3994           memoizedIsInitialized = 0;
3995           return false;
3996         }
3997       }
3998       memoizedIsInitialized = 1;
3999       return true;
4000     }
4001 
writeTo(com.google.protobuf.CodedOutputStream output)4002     public void writeTo(com.google.protobuf.CodedOutputStream output)
4003                         throws java.io.IOException {
4004       getSerializedSize();
4005       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4006         output.writeMessage(1, region_);
4007       }
4008       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4009         output.writeMessage(2, destServerName_);
4010       }
4011       getUnknownFields().writeTo(output);
4012     }
4013 
4014     private int memoizedSerializedSize = -1;
getSerializedSize()4015     public int getSerializedSize() {
4016       int size = memoizedSerializedSize;
4017       if (size != -1) return size;
4018 
4019       size = 0;
4020       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4021         size += com.google.protobuf.CodedOutputStream
4022           .computeMessageSize(1, region_);
4023       }
4024       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4025         size += com.google.protobuf.CodedOutputStream
4026           .computeMessageSize(2, destServerName_);
4027       }
4028       size += getUnknownFields().getSerializedSize();
4029       memoizedSerializedSize = size;
4030       return size;
4031     }
4032 
4033     private static final long serialVersionUID = 0L;
4034     @java.lang.Override
writeReplace()4035     protected java.lang.Object writeReplace()
4036         throws java.io.ObjectStreamException {
4037       return super.writeReplace();
4038     }
4039 
    // Structural equality: compares has-bits and field values for region
    // and dest_server_name, plus unknown fields. Mirrors hashCode() below.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasDestServerName() == other.hasDestServerName());
      if (hasDestServerName()) {
        result = result && getDestServerName()
            .equals(other.getDestServerName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
4065 
    // Cached hash; 0 means "not yet computed" (recomputed if hash is 0).
    private int memoizedHashCode = 0;
    // Hash mixes the descriptor, each set field (tagged by its field
    // number), and the unknown fields — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasDestServerName()) {
        hash = (37 * hash) + DEST_SERVER_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getDestServerName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4086 
parseFrom( com.google.protobuf.ByteString data)4087     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(
4088         com.google.protobuf.ByteString data)
4089         throws com.google.protobuf.InvalidProtocolBufferException {
4090       return PARSER.parseFrom(data);
4091     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4092     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(
4093         com.google.protobuf.ByteString data,
4094         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4095         throws com.google.protobuf.InvalidProtocolBufferException {
4096       return PARSER.parseFrom(data, extensionRegistry);
4097     }
parseFrom(byte[] data)4098     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(byte[] data)
4099         throws com.google.protobuf.InvalidProtocolBufferException {
4100       return PARSER.parseFrom(data);
4101     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4102     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(
4103         byte[] data,
4104         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4105         throws com.google.protobuf.InvalidProtocolBufferException {
4106       return PARSER.parseFrom(data, extensionRegistry);
4107     }
parseFrom(java.io.InputStream input)4108     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(java.io.InputStream input)
4109         throws java.io.IOException {
4110       return PARSER.parseFrom(input);
4111     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4112     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(
4113         java.io.InputStream input,
4114         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4115         throws java.io.IOException {
4116       return PARSER.parseFrom(input, extensionRegistry);
4117     }
parseDelimitedFrom(java.io.InputStream input)4118     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseDelimitedFrom(java.io.InputStream input)
4119         throws java.io.IOException {
4120       return PARSER.parseDelimitedFrom(input);
4121     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4122     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseDelimitedFrom(
4123         java.io.InputStream input,
4124         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4125         throws java.io.IOException {
4126       return PARSER.parseDelimitedFrom(input, extensionRegistry);
4127     }
parseFrom( com.google.protobuf.CodedInputStream input)4128     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(
4129         com.google.protobuf.CodedInputStream input)
4130         throws java.io.IOException {
4131       return PARSER.parseFrom(input);
4132     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4133     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parseFrom(
4134         com.google.protobuf.CodedInputStream input,
4135         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4136         throws java.io.IOException {
4137       return PARSER.parseFrom(input, extensionRegistry);
4138     }
4139 
newBuilder()4140     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()4141     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest prototype)4142     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest prototype) {
4143       return newBuilder().mergeFrom(prototype);
4144     }
toBuilder()4145     public Builder toBuilder() { return newBuilder(this); }
4146 
4147     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)4148     protected Builder newBuilderForType(
4149         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4150       Builder builder = new Builder(parent);
4151       return builder;
4152     }
4153     /**
4154      * Protobuf type {@code MoveRegionRequest}
4155      */
4156     public static final class Builder extends
4157         com.google.protobuf.GeneratedMessage.Builder<Builder>
4158        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequestOrBuilder {
4159       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()4160           getDescriptor() {
4161         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_descriptor;
4162       }
4163 
4164       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()4165           internalGetFieldAccessorTable() {
4166         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_fieldAccessorTable
4167             .ensureFieldAccessorsInitialized(
4168                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.Builder.class);
4169       }
4170 
4171       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.newBuilder()
Builder()4172       private Builder() {
4173         maybeForceBuilderInitialization();
4174       }
4175 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)4176       private Builder(
4177           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4178         super(parent);
4179         maybeForceBuilderInitialization();
4180       }
maybeForceBuilderInitialization()4181       private void maybeForceBuilderInitialization() {
4182         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4183           getRegionFieldBuilder();
4184           getDestServerNameFieldBuilder();
4185         }
4186       }
create()4187       private static Builder create() {
4188         return new Builder();
4189       }
4190 
clear()4191       public Builder clear() {
4192         super.clear();
4193         if (regionBuilder_ == null) {
4194           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
4195         } else {
4196           regionBuilder_.clear();
4197         }
4198         bitField0_ = (bitField0_ & ~0x00000001);
4199         if (destServerNameBuilder_ == null) {
4200           destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
4201         } else {
4202           destServerNameBuilder_.clear();
4203         }
4204         bitField0_ = (bitField0_ & ~0x00000002);
4205         return this;
4206       }
4207 
clone()4208       public Builder clone() {
4209         return create().mergeFrom(buildPartial());
4210       }
4211 
4212       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()4213           getDescriptorForType() {
4214         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionRequest_descriptor;
4215       }
4216 
getDefaultInstanceForType()4217       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest getDefaultInstanceForType() {
4218         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance();
4219       }
4220 
build()4221       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest build() {
4222         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest result = buildPartial();
4223         if (!result.isInitialized()) {
4224           throw newUninitializedMessageException(result);
4225         }
4226         return result;
4227       }
4228 
buildPartial()4229       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest buildPartial() {
4230         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest(this);
4231         int from_bitField0_ = bitField0_;
4232         int to_bitField0_ = 0;
4233         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4234           to_bitField0_ |= 0x00000001;
4235         }
4236         if (regionBuilder_ == null) {
4237           result.region_ = region_;
4238         } else {
4239           result.region_ = regionBuilder_.build();
4240         }
4241         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4242           to_bitField0_ |= 0x00000002;
4243         }
4244         if (destServerNameBuilder_ == null) {
4245           result.destServerName_ = destServerName_;
4246         } else {
4247           result.destServerName_ = destServerNameBuilder_.build();
4248         }
4249         result.bitField0_ = to_bitField0_;
4250         onBuilt();
4251         return result;
4252       }
4253 
mergeFrom(com.google.protobuf.Message other)4254       public Builder mergeFrom(com.google.protobuf.Message other) {
4255         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest) {
4256           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)other);
4257         } else {
4258           super.mergeFrom(other);
4259           return this;
4260         }
4261       }
4262 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest other)4263       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest other) {
4264         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance()) return this;
4265         if (other.hasRegion()) {
4266           mergeRegion(other.getRegion());
4267         }
4268         if (other.hasDestServerName()) {
4269           mergeDestServerName(other.getDestServerName());
4270         }
4271         this.mergeUnknownFields(other.getUnknownFields());
4272         return this;
4273       }
4274 
isInitialized()4275       public final boolean isInitialized() {
4276         if (!hasRegion()) {
4277 
4278           return false;
4279         }
4280         if (!getRegion().isInitialized()) {
4281 
4282           return false;
4283         }
4284         if (hasDestServerName()) {
4285           if (!getDestServerName().isInitialized()) {
4286 
4287             return false;
4288           }
4289         }
4290         return true;
4291       }
4292 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4293       public Builder mergeFrom(
4294           com.google.protobuf.CodedInputStream input,
4295           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4296           throws java.io.IOException {
4297         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest parsedMessage = null;
4298         try {
4299           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4300         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4301           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest) e.getUnfinishedMessage();
4302           throw e;
4303         } finally {
4304           if (parsedMessage != null) {
4305             mergeFrom(parsedMessage);
4306           }
4307         }
4308         return this;
4309       }
4310       private int bitField0_;
4311 
4312       // required .RegionSpecifier region = 1;
4313       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
4314       private com.google.protobuf.SingleFieldBuilder<
4315           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
4316       /**
4317        * <code>required .RegionSpecifier region = 1;</code>
4318        */
hasRegion()4319       public boolean hasRegion() {
4320         return ((bitField0_ & 0x00000001) == 0x00000001);
4321       }
4322       /**
4323        * <code>required .RegionSpecifier region = 1;</code>
4324        */
getRegion()4325       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
4326         if (regionBuilder_ == null) {
4327           return region_;
4328         } else {
4329           return regionBuilder_.getMessage();
4330         }
4331       }
4332       /**
4333        * <code>required .RegionSpecifier region = 1;</code>
4334        */
setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)4335       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
4336         if (regionBuilder_ == null) {
4337           if (value == null) {
4338             throw new NullPointerException();
4339           }
4340           region_ = value;
4341           onChanged();
4342         } else {
4343           regionBuilder_.setMessage(value);
4344         }
4345         bitField0_ |= 0x00000001;
4346         return this;
4347       }
4348       /**
4349        * <code>required .RegionSpecifier region = 1;</code>
4350        */
setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)4351       public Builder setRegion(
4352           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
4353         if (regionBuilder_ == null) {
4354           region_ = builderForValue.build();
4355           onChanged();
4356         } else {
4357           regionBuilder_.setMessage(builderForValue.build());
4358         }
4359         bitField0_ |= 0x00000001;
4360         return this;
4361       }
4362       /**
4363        * <code>required .RegionSpecifier region = 1;</code>
4364        */
mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)4365       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
4366         if (regionBuilder_ == null) {
4367           if (((bitField0_ & 0x00000001) == 0x00000001) &&
4368               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
4369             region_ =
4370               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
4371           } else {
4372             region_ = value;
4373           }
4374           onChanged();
4375         } else {
4376           regionBuilder_.mergeFrom(value);
4377         }
4378         bitField0_ |= 0x00000001;
4379         return this;
4380       }
4381       /**
4382        * <code>required .RegionSpecifier region = 1;</code>
4383        */
clearRegion()4384       public Builder clearRegion() {
4385         if (regionBuilder_ == null) {
4386           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
4387           onChanged();
4388         } else {
4389           regionBuilder_.clear();
4390         }
4391         bitField0_ = (bitField0_ & ~0x00000001);
4392         return this;
4393       }
4394       /**
4395        * <code>required .RegionSpecifier region = 1;</code>
4396        */
getRegionBuilder()4397       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
4398         bitField0_ |= 0x00000001;
4399         onChanged();
4400         return getRegionFieldBuilder().getBuilder();
4401       }
4402       /**
4403        * <code>required .RegionSpecifier region = 1;</code>
4404        */
getRegionOrBuilder()4405       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
4406         if (regionBuilder_ != null) {
4407           return regionBuilder_.getMessageOrBuilder();
4408         } else {
4409           return region_;
4410         }
4411       }
4412       /**
4413        * <code>required .RegionSpecifier region = 1;</code>
4414        */
4415       private com.google.protobuf.SingleFieldBuilder<
4416           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder()4417           getRegionFieldBuilder() {
4418         if (regionBuilder_ == null) {
4419           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
4420               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
4421                   region_,
4422                   getParentForChildren(),
4423                   isClean());
4424           region_ = null;
4425         }
4426         return regionBuilder_;
4427       }
4428 
4429       // optional .ServerName dest_server_name = 2;
4430       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
4431       private com.google.protobuf.SingleFieldBuilder<
4432           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destServerNameBuilder_;
4433       /**
4434        * <code>optional .ServerName dest_server_name = 2;</code>
4435        */
hasDestServerName()4436       public boolean hasDestServerName() {
4437         return ((bitField0_ & 0x00000002) == 0x00000002);
4438       }
4439       /**
4440        * <code>optional .ServerName dest_server_name = 2;</code>
4441        */
getDestServerName()4442       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestServerName() {
4443         if (destServerNameBuilder_ == null) {
4444           return destServerName_;
4445         } else {
4446           return destServerNameBuilder_.getMessage();
4447         }
4448       }
4449       /**
4450        * <code>optional .ServerName dest_server_name = 2;</code>
4451        */
setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4452       public Builder setDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4453         if (destServerNameBuilder_ == null) {
4454           if (value == null) {
4455             throw new NullPointerException();
4456           }
4457           destServerName_ = value;
4458           onChanged();
4459         } else {
4460           destServerNameBuilder_.setMessage(value);
4461         }
4462         bitField0_ |= 0x00000002;
4463         return this;
4464       }
4465       /**
4466        * <code>optional .ServerName dest_server_name = 2;</code>
4467        */
setDestServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)4468       public Builder setDestServerName(
4469           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
4470         if (destServerNameBuilder_ == null) {
4471           destServerName_ = builderForValue.build();
4472           onChanged();
4473         } else {
4474           destServerNameBuilder_.setMessage(builderForValue.build());
4475         }
4476         bitField0_ |= 0x00000002;
4477         return this;
4478       }
4479       /**
4480        * <code>optional .ServerName dest_server_name = 2;</code>
4481        */
mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4482       public Builder mergeDestServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4483         if (destServerNameBuilder_ == null) {
4484           if (((bitField0_ & 0x00000002) == 0x00000002) &&
4485               destServerName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
4486             destServerName_ =
4487               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destServerName_).mergeFrom(value).buildPartial();
4488           } else {
4489             destServerName_ = value;
4490           }
4491           onChanged();
4492         } else {
4493           destServerNameBuilder_.mergeFrom(value);
4494         }
4495         bitField0_ |= 0x00000002;
4496         return this;
4497       }
4498       /**
4499        * <code>optional .ServerName dest_server_name = 2;</code>
4500        */
clearDestServerName()4501       public Builder clearDestServerName() {
4502         if (destServerNameBuilder_ == null) {
4503           destServerName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
4504           onChanged();
4505         } else {
4506           destServerNameBuilder_.clear();
4507         }
4508         bitField0_ = (bitField0_ & ~0x00000002);
4509         return this;
4510       }
4511       /**
4512        * <code>optional .ServerName dest_server_name = 2;</code>
4513        */
getDestServerNameBuilder()4514       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestServerNameBuilder() {
4515         bitField0_ |= 0x00000002;
4516         onChanged();
4517         return getDestServerNameFieldBuilder().getBuilder();
4518       }
4519       /**
4520        * <code>optional .ServerName dest_server_name = 2;</code>
4521        */
getDestServerNameOrBuilder()4522       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestServerNameOrBuilder() {
4523         if (destServerNameBuilder_ != null) {
4524           return destServerNameBuilder_.getMessageOrBuilder();
4525         } else {
4526           return destServerName_;
4527         }
4528       }
4529       /**
4530        * <code>optional .ServerName dest_server_name = 2;</code>
4531        */
4532       private com.google.protobuf.SingleFieldBuilder<
4533           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
getDestServerNameFieldBuilder()4534           getDestServerNameFieldBuilder() {
4535         if (destServerNameBuilder_ == null) {
4536           destServerNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
4537               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
4538                   destServerName_,
4539                   getParentForChildren(),
4540                   isClean());
4541           destServerName_ = null;
4542         }
4543         return destServerNameBuilder_;
4544       }
4545 
4546       // @@protoc_insertion_point(builder_scope:MoveRegionRequest)
4547     }
4548 
    // Eagerly creates the shared default (empty) instance returned by
    // getDefaultInstance() and used as the merge/clear baseline.
    static {
      defaultInstance = new MoveRegionRequest(true);
      defaultInstance.initFields();
    }
4553 
4554     // @@protoc_insertion_point(class_scope:MoveRegionRequest)
4555   }
4556 
  // MoveRegionResponse declares no fields, so its builder interface adds
  // nothing beyond the base MessageOrBuilder contract.
  public interface MoveRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
4560   /**
4561    * Protobuf type {@code MoveRegionResponse}
4562    */
4563   public static final class MoveRegionResponse extends
4564       com.google.protobuf.GeneratedMessage
4565       implements MoveRegionResponseOrBuilder {
4566     // Use MoveRegionResponse.newBuilder() to construct.
MoveRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)4567     private MoveRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4568       super(builder);
4569       this.unknownFields = builder.getUnknownFields();
4570     }
MoveRegionResponse(boolean noInit)4571     private MoveRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4572 
    // Shared immutable default (empty) instance, created in the class's
    // static initializer.
    private static final MoveRegionResponse defaultInstance;
    public static MoveRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public MoveRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not defined in the .proto schema, preserved on round-trip.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
MoveRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4588     private MoveRegionResponse(
4589         com.google.protobuf.CodedInputStream input,
4590         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4591         throws com.google.protobuf.InvalidProtocolBufferException {
4592       initFields();
4593       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4594           com.google.protobuf.UnknownFieldSet.newBuilder();
4595       try {
4596         boolean done = false;
4597         while (!done) {
4598           int tag = input.readTag();
4599           switch (tag) {
4600             case 0:
4601               done = true;
4602               break;
4603             default: {
4604               if (!parseUnknownField(input, unknownFields,
4605                                      extensionRegistry, tag)) {
4606                 done = true;
4607               }
4608               break;
4609             }
4610           }
4611         }
4612       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4613         throw e.setUnfinishedMessage(this);
4614       } catch (java.io.IOException e) {
4615         throw new com.google.protobuf.InvalidProtocolBufferException(
4616             e.getMessage()).setUnfinishedMessage(this);
4617       } finally {
4618         this.unknownFields = unknownFields.build();
4619         makeExtensionsImmutable();
4620       }
4621     }
    // Reflection support: descriptor and field accessor table generated
    // from Master.proto.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.Builder.class);
    }
4633 
    // Stateless parser used by all static parseFrom/parseDelimitedFrom
    // entry points; delegates to the parsing constructor.
    public static com.google.protobuf.Parser<MoveRegionResponse> PARSER =
        new com.google.protobuf.AbstractParser<MoveRegionResponse>() {
      public MoveRegionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MoveRegionResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MoveRegionResponse> getParserForType() {
      return PARSER;
    }
4648 
initFields()4649     private void initFields() {
4650     }
4651     private byte memoizedIsInitialized = -1;
isInitialized()4652     public final boolean isInitialized() {
4653       byte isInitialized = memoizedIsInitialized;
4654       if (isInitialized != -1) return isInitialized == 1;
4655 
4656       memoizedIsInitialized = 1;
4657       return true;
4658     }
4659 
writeTo(com.google.protobuf.CodedOutputStream output)4660     public void writeTo(com.google.protobuf.CodedOutputStream output)
4661                         throws java.io.IOException {
4662       getSerializedSize();
4663       getUnknownFields().writeTo(output);
4664     }
4665 
4666     private int memoizedSerializedSize = -1;
getSerializedSize()4667     public int getSerializedSize() {
4668       int size = memoizedSerializedSize;
4669       if (size != -1) return size;
4670 
4671       size = 0;
4672       size += getUnknownFields().getSerializedSize();
4673       memoizedSerializedSize = size;
4674       return size;
4675     }
4676 
    private static final long serialVersionUID = 0L;
    // Java serialization routes through the protobuf base class so the
    // message serializes via its wire format.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
4683 
    // With no declared fields, equality reduces to comparing the
    // unknown-field sets.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
4699 
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    // Hash mixes only the descriptor and the unknown fields — consistent
    // with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4712 
    // ----- Static parse entry points; all delegate to the shared PARSER. -----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
4765 
    // ----- Builder factories. -----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new Builder pre-populated from the given prototype message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a Builder attached to the given parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
4779     /**
4780      * Protobuf type {@code MoveRegionResponse}
4781      */
    // Builder for MoveRegionResponse. The message has no declared fields, so the
    // builder only manages unknown fields and the standard lifecycle methods.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields exist, so there are no nested field builders to pre-create.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MoveRegionResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance();
      }

      // Like buildPartial() but throws if required fields are missing (none here).
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse(this);
        onBuilt();
        return result;
      }

      // Generic merge: narrows to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Stream merge: on a parse error, any partially-parsed message is still
      // merged (in the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:MoveRegionResponse)
    }
4886 
    // Eagerly create and initialize the shared default instance.
    static {
      defaultInstance = new MoveRegionResponse(true);
      defaultInstance.initFields();
    }
4891 
4892     // @@protoc_insertion_point(class_scope:MoveRegionResponse)
4893   }
4894 
  /**
   * Accessor contract shared by {@code DispatchMergingRegionsRequest} and its Builder:
   * two required region specifiers plus an optional {@code forcible} flag.
   */
  public interface DispatchMergingRegionsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region_a = 1;
    /**
     * <code>required .RegionSpecifier region_a = 1;</code>
     */
    boolean hasRegionA();
    /**
     * <code>required .RegionSpecifier region_a = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA();
    /**
     * <code>required .RegionSpecifier region_a = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder();

    // required .RegionSpecifier region_b = 2;
    /**
     * <code>required .RegionSpecifier region_b = 2;</code>
     */
    boolean hasRegionB();
    /**
     * <code>required .RegionSpecifier region_b = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB();
    /**
     * <code>required .RegionSpecifier region_b = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder();

    // optional bool forcible = 3 [default = false];
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     */
    boolean hasForcible();
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     */
    boolean getForcible();
  }
4936   /**
4937    * Protobuf type {@code DispatchMergingRegionsRequest}
4938    *
4939    * <pre>
4940    **
4941    * Dispatch merging the specified regions.
4942    * </pre>
4943    */
4944   public static final class DispatchMergingRegionsRequest extends
4945       com.google.protobuf.GeneratedMessage
4946       implements DispatchMergingRegionsRequestOrBuilder {
4947     // Use DispatchMergingRegionsRequest.newBuilder() to construct.
    // Builder-based constructor; adopts the builder's unknown fields.
    private DispatchMergingRegionsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used for the default instance; fields are set via initFields() separately.
    private DispatchMergingRegionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4953 
    // Shared immutable default instance for this message type.
    private static final DispatchMergingRegionsRequest defaultInstance;
    public static DispatchMergingRegionsRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DispatchMergingRegionsRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
4962 
    // Fields present on the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0)
    // or an unknown end-group tag, setting presence bits as fields arrive.
    private DispatchMergingRegionsRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the generator emits `default:` before the field cases; switch
          // case order has no semantic effect in Java.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 (region_a), wire type 2 (length-delimited)
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field repeated on the wire: merge into the previous value.
                subBuilder = regionA_.toBuilder();
              }
              regionA_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(regionA_);
                regionA_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {  // field 2 (region_b), wire type 2 (length-delimited)
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = regionB_.toBuilder();
              }
              regionB_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(regionB_);
                regionB_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 24: {  // field 3 (forcible), wire type 0 (varint)
              bitField0_ |= 0x00000004;
              forcible_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Unknown fields are preserved even on error so the partial message is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the protobuf descriptor for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_descriptor;
    }

    // Reflection support: maps descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.Builder.class);
    }
5046 
    // Shared parser instance used by all static parseFrom helpers.
    public static com.google.protobuf.Parser<DispatchMergingRegionsRequest> PARSER =
        new com.google.protobuf.AbstractParser<DispatchMergingRegionsRequest>() {
      public DispatchMergingRegionsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DispatchMergingRegionsRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DispatchMergingRegionsRequest> getParserForType() {
      return PARSER;
    }
5061 
    // Presence bits: 0x1 = region_a, 0x2 = region_b, 0x4 = forcible.
    private int bitField0_;
    // required .RegionSpecifier region_a = 1;
    public static final int REGION_A_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_;
    /**
     * <code>required .RegionSpecifier region_a = 1;</code>
     */
    public boolean hasRegionA() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region_a = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() {
      return regionA_;
    }
    /**
     * <code>required .RegionSpecifier region_a = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() {
      return regionA_;
    }

    // required .RegionSpecifier region_b = 2;
    public static final int REGION_B_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_;
    /**
     * <code>required .RegionSpecifier region_b = 2;</code>
     */
    public boolean hasRegionB() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .RegionSpecifier region_b = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() {
      return regionB_;
    }
    /**
     * <code>required .RegionSpecifier region_b = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() {
      return regionB_;
    }

    // optional bool forcible = 3 [default = false];
    public static final int FORCIBLE_FIELD_NUMBER = 3;
    private boolean forcible_;
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     */
    public boolean hasForcible() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool forcible = 3 [default = false];</code>
     */
    public boolean getForcible() {
      return forcible_;
    }
5122 
    // Resets all fields to their proto-declared defaults.
    private void initFields() {
      regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      forcible_ = false;
    }
    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // Initialized iff both required regions are present and themselves initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasRegionA()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRegionB()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegionA().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegionB().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
5152 
    // Serializes only the fields whose presence bit is set, in field-number order.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // forces the memoized size to be computed before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, regionA_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, regionB_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(3, forcible_);
      }
      getUnknownFields().writeTo(output);
    }
5167 
    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes the serialized byte size of the set fields plus unknown fields; memoized.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, regionA_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, regionB_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, forcible_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
5190 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the GeneratedMessage replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
5197 
    // Field-by-field equality: matching presence, matching values, matching unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) obj;

      boolean result = true;
      result = result && (hasRegionA() == other.hasRegionA());
      if (hasRegionA()) {
        result = result && getRegionA()
            .equals(other.getRegionA());
      }
      result = result && (hasRegionB() == other.hasRegionB());
      if (hasRegionB()) {
        result = result && getRegionB()
            .equals(other.getRegionB());
      }
      result = result && (hasForcible() == other.hasForcible());
      if (hasForcible()) {
        result = result && (getForcible()
            == other.getForcible());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
5228 
    // Cached hash; 0 means "not yet computed" (a genuinely-zero hash is recomputed).
    private int memoizedHashCode = 0;
    // Hash mixes descriptor identity, each present field (tagged by its number),
    // and unknown fields — consistent with equals().
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegionA()) {
        hash = (37 * hash) + REGION_A_FIELD_NUMBER;
        hash = (53 * hash) + getRegionA().hashCode();
      }
      if (hasRegionB()) {
        hash = (37 * hash) + REGION_B_FIELD_NUMBER;
        hash = (53 * hash) + getRegionB().hashCode();
      }
      if (hasForcible()) {
        hash = (37 * hash) + FORCIBLE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getForcible());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
5253 
    // ----- Static parse entry points; all delegate to the shared PARSER. -----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
5306 
    // ----- Builder factories. -----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new Builder pre-populated from the given prototype message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a Builder attached to the given parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
5320     /**
5321      * Protobuf type {@code DispatchMergingRegionsRequest}
5322      *
5323      * <pre>
5324      **
5325      * Dispatch merging the specified regions.
5326      * </pre>
5327      */
5328     public static final class Builder extends
5329         com.google.protobuf.GeneratedMessage.Builder<Builder>
5330        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequestOrBuilder {
      // Returns the protobuf descriptor for DispatchMergingRegionsRequest.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_descriptor;
      }

      // Reflection support: maps descriptor fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates nested field builders for the two message-typed fields when
      // the runtime is configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionAFieldBuilder();
          getRegionBFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
5362 
clear()5363       public Builder clear() {
5364         super.clear();
5365         if (regionABuilder_ == null) {
5366           regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5367         } else {
5368           regionABuilder_.clear();
5369         }
5370         bitField0_ = (bitField0_ & ~0x00000001);
5371         if (regionBBuilder_ == null) {
5372           regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5373         } else {
5374           regionBBuilder_.clear();
5375         }
5376         bitField0_ = (bitField0_ & ~0x00000002);
5377         forcible_ = false;
5378         bitField0_ = (bitField0_ & ~0x00000004);
5379         return this;
5380       }
5381 
clone()5382       public Builder clone() {
5383         return create().mergeFrom(buildPartial());
5384       }
5385 
5386       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()5387           getDescriptorForType() {
5388         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsRequest_descriptor;
5389       }
5390 
getDefaultInstanceForType()5391       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest getDefaultInstanceForType() {
5392         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance();
5393       }
5394 
build()5395       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest build() {
5396         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest result = buildPartial();
5397         if (!result.isInitialized()) {
5398           throw newUninitializedMessageException(result);
5399         }
5400         return result;
5401       }
5402 
buildPartial()5403       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest buildPartial() {
5404         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest(this);
5405         int from_bitField0_ = bitField0_;
5406         int to_bitField0_ = 0;
5407         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
5408           to_bitField0_ |= 0x00000001;
5409         }
5410         if (regionABuilder_ == null) {
5411           result.regionA_ = regionA_;
5412         } else {
5413           result.regionA_ = regionABuilder_.build();
5414         }
5415         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
5416           to_bitField0_ |= 0x00000002;
5417         }
5418         if (regionBBuilder_ == null) {
5419           result.regionB_ = regionB_;
5420         } else {
5421           result.regionB_ = regionBBuilder_.build();
5422         }
5423         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
5424           to_bitField0_ |= 0x00000004;
5425         }
5426         result.forcible_ = forcible_;
5427         result.bitField0_ = to_bitField0_;
5428         onBuilt();
5429         return result;
5430       }
5431 
mergeFrom(com.google.protobuf.Message other)5432       public Builder mergeFrom(com.google.protobuf.Message other) {
5433         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) {
5434           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)other);
5435         } else {
5436           super.mergeFrom(other);
5437           return this;
5438         }
5439       }
5440 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest other)5441       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest other) {
5442         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance()) return this;
5443         if (other.hasRegionA()) {
5444           mergeRegionA(other.getRegionA());
5445         }
5446         if (other.hasRegionB()) {
5447           mergeRegionB(other.getRegionB());
5448         }
5449         if (other.hasForcible()) {
5450           setForcible(other.getForcible());
5451         }
5452         this.mergeUnknownFields(other.getUnknownFields());
5453         return this;
5454       }
5455 
isInitialized()5456       public final boolean isInitialized() {
5457         if (!hasRegionA()) {
5458 
5459           return false;
5460         }
5461         if (!hasRegionB()) {
5462 
5463           return false;
5464         }
5465         if (!getRegionA().isInitialized()) {
5466 
5467           return false;
5468         }
5469         if (!getRegionB().isInitialized()) {
5470 
5471           return false;
5472         }
5473         return true;
5474       }
5475 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5476       public Builder mergeFrom(
5477           com.google.protobuf.CodedInputStream input,
5478           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5479           throws java.io.IOException {
5480         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest parsedMessage = null;
5481         try {
5482           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5483         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5484           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest) e.getUnfinishedMessage();
5485           throw e;
5486         } finally {
5487           if (parsedMessage != null) {
5488             mergeFrom(parsedMessage);
5489           }
5490         }
5491         return this;
5492       }
5493       private int bitField0_;
5494 
5495       // required .RegionSpecifier region_a = 1;
5496       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5497       private com.google.protobuf.SingleFieldBuilder<
5498           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionABuilder_;
5499       /**
5500        * <code>required .RegionSpecifier region_a = 1;</code>
5501        */
hasRegionA()5502       public boolean hasRegionA() {
5503         return ((bitField0_ & 0x00000001) == 0x00000001);
5504       }
5505       /**
5506        * <code>required .RegionSpecifier region_a = 1;</code>
5507        */
getRegionA()5508       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionA() {
5509         if (regionABuilder_ == null) {
5510           return regionA_;
5511         } else {
5512           return regionABuilder_.getMessage();
5513         }
5514       }
5515       /**
5516        * <code>required .RegionSpecifier region_a = 1;</code>
5517        */
setRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)5518       public Builder setRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
5519         if (regionABuilder_ == null) {
5520           if (value == null) {
5521             throw new NullPointerException();
5522           }
5523           regionA_ = value;
5524           onChanged();
5525         } else {
5526           regionABuilder_.setMessage(value);
5527         }
5528         bitField0_ |= 0x00000001;
5529         return this;
5530       }
5531       /**
5532        * <code>required .RegionSpecifier region_a = 1;</code>
5533        */
setRegionA( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)5534       public Builder setRegionA(
5535           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
5536         if (regionABuilder_ == null) {
5537           regionA_ = builderForValue.build();
5538           onChanged();
5539         } else {
5540           regionABuilder_.setMessage(builderForValue.build());
5541         }
5542         bitField0_ |= 0x00000001;
5543         return this;
5544       }
5545       /**
5546        * <code>required .RegionSpecifier region_a = 1;</code>
5547        */
mergeRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)5548       public Builder mergeRegionA(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
5549         if (regionABuilder_ == null) {
5550           if (((bitField0_ & 0x00000001) == 0x00000001) &&
5551               regionA_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
5552             regionA_ =
5553               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionA_).mergeFrom(value).buildPartial();
5554           } else {
5555             regionA_ = value;
5556           }
5557           onChanged();
5558         } else {
5559           regionABuilder_.mergeFrom(value);
5560         }
5561         bitField0_ |= 0x00000001;
5562         return this;
5563       }
5564       /**
5565        * <code>required .RegionSpecifier region_a = 1;</code>
5566        */
clearRegionA()5567       public Builder clearRegionA() {
5568         if (regionABuilder_ == null) {
5569           regionA_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5570           onChanged();
5571         } else {
5572           regionABuilder_.clear();
5573         }
5574         bitField0_ = (bitField0_ & ~0x00000001);
5575         return this;
5576       }
5577       /**
5578        * <code>required .RegionSpecifier region_a = 1;</code>
5579        */
getRegionABuilder()5580       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionABuilder() {
5581         bitField0_ |= 0x00000001;
5582         onChanged();
5583         return getRegionAFieldBuilder().getBuilder();
5584       }
5585       /**
5586        * <code>required .RegionSpecifier region_a = 1;</code>
5587        */
getRegionAOrBuilder()5588       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionAOrBuilder() {
5589         if (regionABuilder_ != null) {
5590           return regionABuilder_.getMessageOrBuilder();
5591         } else {
5592           return regionA_;
5593         }
5594       }
5595       /**
5596        * <code>required .RegionSpecifier region_a = 1;</code>
5597        */
5598       private com.google.protobuf.SingleFieldBuilder<
5599           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionAFieldBuilder()5600           getRegionAFieldBuilder() {
5601         if (regionABuilder_ == null) {
5602           regionABuilder_ = new com.google.protobuf.SingleFieldBuilder<
5603               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
5604                   regionA_,
5605                   getParentForChildren(),
5606                   isClean());
5607           regionA_ = null;
5608         }
5609         return regionABuilder_;
5610       }
5611 
5612       // required .RegionSpecifier region_b = 2;
5613       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5614       private com.google.protobuf.SingleFieldBuilder<
5615           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBBuilder_;
5616       /**
5617        * <code>required .RegionSpecifier region_b = 2;</code>
5618        */
hasRegionB()5619       public boolean hasRegionB() {
5620         return ((bitField0_ & 0x00000002) == 0x00000002);
5621       }
5622       /**
5623        * <code>required .RegionSpecifier region_b = 2;</code>
5624        */
getRegionB()5625       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegionB() {
5626         if (regionBBuilder_ == null) {
5627           return regionB_;
5628         } else {
5629           return regionBBuilder_.getMessage();
5630         }
5631       }
5632       /**
5633        * <code>required .RegionSpecifier region_b = 2;</code>
5634        */
setRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)5635       public Builder setRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
5636         if (regionBBuilder_ == null) {
5637           if (value == null) {
5638             throw new NullPointerException();
5639           }
5640           regionB_ = value;
5641           onChanged();
5642         } else {
5643           regionBBuilder_.setMessage(value);
5644         }
5645         bitField0_ |= 0x00000002;
5646         return this;
5647       }
5648       /**
5649        * <code>required .RegionSpecifier region_b = 2;</code>
5650        */
setRegionB( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)5651       public Builder setRegionB(
5652           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
5653         if (regionBBuilder_ == null) {
5654           regionB_ = builderForValue.build();
5655           onChanged();
5656         } else {
5657           regionBBuilder_.setMessage(builderForValue.build());
5658         }
5659         bitField0_ |= 0x00000002;
5660         return this;
5661       }
5662       /**
5663        * <code>required .RegionSpecifier region_b = 2;</code>
5664        */
mergeRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)5665       public Builder mergeRegionB(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
5666         if (regionBBuilder_ == null) {
5667           if (((bitField0_ & 0x00000002) == 0x00000002) &&
5668               regionB_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
5669             regionB_ =
5670               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(regionB_).mergeFrom(value).buildPartial();
5671           } else {
5672             regionB_ = value;
5673           }
5674           onChanged();
5675         } else {
5676           regionBBuilder_.mergeFrom(value);
5677         }
5678         bitField0_ |= 0x00000002;
5679         return this;
5680       }
5681       /**
5682        * <code>required .RegionSpecifier region_b = 2;</code>
5683        */
clearRegionB()5684       public Builder clearRegionB() {
5685         if (regionBBuilder_ == null) {
5686           regionB_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
5687           onChanged();
5688         } else {
5689           regionBBuilder_.clear();
5690         }
5691         bitField0_ = (bitField0_ & ~0x00000002);
5692         return this;
5693       }
5694       /**
5695        * <code>required .RegionSpecifier region_b = 2;</code>
5696        */
getRegionBBuilder()5697       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBBuilder() {
5698         bitField0_ |= 0x00000002;
5699         onChanged();
5700         return getRegionBFieldBuilder().getBuilder();
5701       }
5702       /**
5703        * <code>required .RegionSpecifier region_b = 2;</code>
5704        */
getRegionBOrBuilder()5705       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionBOrBuilder() {
5706         if (regionBBuilder_ != null) {
5707           return regionBBuilder_.getMessageOrBuilder();
5708         } else {
5709           return regionB_;
5710         }
5711       }
5712       /**
5713        * <code>required .RegionSpecifier region_b = 2;</code>
5714        */
5715       private com.google.protobuf.SingleFieldBuilder<
5716           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionBFieldBuilder()5717           getRegionBFieldBuilder() {
5718         if (regionBBuilder_ == null) {
5719           regionBBuilder_ = new com.google.protobuf.SingleFieldBuilder<
5720               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
5721                   regionB_,
5722                   getParentForChildren(),
5723                   isClean());
5724           regionB_ = null;
5725         }
5726         return regionBBuilder_;
5727       }
5728 
5729       // optional bool forcible = 3 [default = false];
5730       private boolean forcible_ ;
5731       /**
5732        * <code>optional bool forcible = 3 [default = false];</code>
5733        */
hasForcible()5734       public boolean hasForcible() {
5735         return ((bitField0_ & 0x00000004) == 0x00000004);
5736       }
5737       /**
5738        * <code>optional bool forcible = 3 [default = false];</code>
5739        */
getForcible()5740       public boolean getForcible() {
5741         return forcible_;
5742       }
5743       /**
5744        * <code>optional bool forcible = 3 [default = false];</code>
5745        */
setForcible(boolean value)5746       public Builder setForcible(boolean value) {
5747         bitField0_ |= 0x00000004;
5748         forcible_ = value;
5749         onChanged();
5750         return this;
5751       }
5752       /**
5753        * <code>optional bool forcible = 3 [default = false];</code>
5754        */
clearForcible()5755       public Builder clearForcible() {
5756         bitField0_ = (bitField0_ & ~0x00000004);
5757         forcible_ = false;
5758         onChanged();
5759         return this;
5760       }
5761 
5762       // @@protoc_insertion_point(builder_scope:DispatchMergingRegionsRequest)
5763     }
5764 
    // Creates the shared singleton default instance via the no-init
    // constructor, then runs field initialization on it.
    static {
      defaultInstance = new DispatchMergingRegionsRequest(true);
      defaultInstance.initFields();
    }
5769 
5770     // @@protoc_insertion_point(class_scope:DispatchMergingRegionsRequest)
5771   }
5772 
  /**
   * Accessor interface for {@code DispatchMergingRegionsResponse}. The message
   * declares no fields, so only the inherited
   * {@link com.google.protobuf.MessageOrBuilder} contract applies.
   */
  public interface DispatchMergingRegionsResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
5776   /**
5777    * Protobuf type {@code DispatchMergingRegionsResponse}
5778    */
5779   public static final class DispatchMergingRegionsResponse extends
5780       com.google.protobuf.GeneratedMessage
5781       implements DispatchMergingRegionsResponseOrBuilder {
    // Use DispatchMergingRegionsResponse.newBuilder() to construct.
    private DispatchMergingRegionsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor, used only for the shared default instance: carries
    // an empty unknown-field set and skips builder-based initialization.
    private DispatchMergingRegionsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5788 
    // Shared singleton default instance; being static final it is assigned
    // exactly once during class initialization (static block not shown in
    // this excerpt).
    private static final DispatchMergingRegionsResponse defaultInstance;
    public static DispatchMergingRegionsResponse getDefaultInstance() {
      return defaultInstance;
    }

    public DispatchMergingRegionsResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
5797 
    // Fields seen on the wire that are not part of this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked by PARSER.parsePartialFrom.
    private DispatchMergingRegionsResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks the end of the message/stream.
              done = true;
              break;
            default: {
              // This message declares no fields: everything read is preserved
              // as an unknown field; parseUnknownField returning false also
              // terminates the loop.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers always see a protobuf exception.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze the collected unknown fields, even on error, so the
        // partially parsed message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the DispatchMergingRegionsResponse message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_descriptor;
    }

    // Reflection support: binds descriptor fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.Builder.class);
    }
5849 
    // Shared parser; delegates to the private parsing constructor above.
    public static com.google.protobuf.Parser<DispatchMergingRegionsResponse> PARSER =
        new com.google.protobuf.AbstractParser<DispatchMergingRegionsResponse>() {
      public DispatchMergingRegionsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DispatchMergingRegionsResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DispatchMergingRegionsResponse> getParserForType() {
      return PARSER;
    }
5864 
    // No declared fields, so there is nothing to initialize.
    private void initFields() {
    }
    // Memoized initialization state: -1 = unknown, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields exist, so this message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
5875 
    // Serializes this message; only unknown fields can carry data since the
    // message declares no fields of its own.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces the serialized size to be computed (and memoized) first.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
5892 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to the GeneratedMessage implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
5899 
5900     @java.lang.Override
equals(final java.lang.Object obj)5901     public boolean equals(final java.lang.Object obj) {
5902       if (obj == this) {
5903        return true;
5904       }
5905       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse)) {
5906         return super.equals(obj);
5907       }
5908       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) obj;
5909 
5910       boolean result = true;
5911       result = result &&
5912           getUnknownFields().equals(other.getUnknownFields());
5913       return result;
5914     }
5915 
5916     private int memoizedHashCode = 0;
5917     @java.lang.Override
hashCode()5918     public int hashCode() {
5919       if (memoizedHashCode != 0) {
5920         return memoizedHashCode;
5921       }
5922       int hash = 41;
5923       hash = (19 * hash) + getDescriptorForType().hashCode();
5924       hash = (29 * hash) + getUnknownFields().hashCode();
5925       memoizedHashCode = hash;
5926       return hash;
5927     }
5928 
    // Static parseFrom/parseDelimitedFrom overload family: each variant
    // delegates to the shared PARSER with its respective input source.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants expect a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
5981 
    // Builder factory methods: all construction of DispatchMergingRegionsResponse
    // instances goes through Builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a new builder pre-populated with the fields of {@code prototype}.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Returns a builder initialized from this message instance.
    public Builder toBuilder() { return newBuilder(this); }
5988 
5989     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)5990     protected Builder newBuilderForType(
5991         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5992       Builder builder = new Builder(parent);
5993       return builder;
5994     }
5995     /**
5996      * Protobuf type {@code DispatchMergingRegionsResponse}
5997      */
5998     public static final class Builder extends
5999         com.google.protobuf.GeneratedMessage.Builder<Builder>
6000        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponseOrBuilder {
6001       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()6002           getDescriptor() {
6003         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_descriptor;
6004       }
6005 
6006       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6007           internalGetFieldAccessorTable() {
6008         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_fieldAccessorTable
6009             .ensureFieldAccessorsInitialized(
6010                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.Builder.class);
6011       }
6012 
6013       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.newBuilder()
Builder()6014       private Builder() {
6015         maybeForceBuilderInitialization();
6016       }
6017 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6018       private Builder(
6019           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6020         super(parent);
6021         maybeForceBuilderInitialization();
6022       }
maybeForceBuilderInitialization()6023       private void maybeForceBuilderInitialization() {
6024         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6025         }
6026       }
create()6027       private static Builder create() {
6028         return new Builder();
6029       }
6030 
clear()6031       public Builder clear() {
6032         super.clear();
6033         return this;
6034       }
6035 
clone()6036       public Builder clone() {
6037         return create().mergeFrom(buildPartial());
6038       }
6039 
6040       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()6041           getDescriptorForType() {
6042         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DispatchMergingRegionsResponse_descriptor;
6043       }
6044 
getDefaultInstanceForType()6045       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse getDefaultInstanceForType() {
6046         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance();
6047       }
6048 
build()6049       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse build() {
6050         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse result = buildPartial();
6051         if (!result.isInitialized()) {
6052           throw newUninitializedMessageException(result);
6053         }
6054         return result;
6055       }
6056 
buildPartial()6057       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse buildPartial() {
6058         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse(this);
6059         onBuilt();
6060         return result;
6061       }
6062 
mergeFrom(com.google.protobuf.Message other)6063       public Builder mergeFrom(com.google.protobuf.Message other) {
6064         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) {
6065           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse)other);
6066         } else {
6067           super.mergeFrom(other);
6068           return this;
6069         }
6070       }
6071 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse other)6072       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse other) {
6073         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance()) return this;
6074         this.mergeUnknownFields(other.getUnknownFields());
6075         return this;
6076       }
6077 
isInitialized()6078       public final boolean isInitialized() {
6079         return true;
6080       }
6081 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6082       public Builder mergeFrom(
6083           com.google.protobuf.CodedInputStream input,
6084           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6085           throws java.io.IOException {
6086         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse parsedMessage = null;
6087         try {
6088           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6089         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6090           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) e.getUnfinishedMessage();
6091           throw e;
6092         } finally {
6093           if (parsedMessage != null) {
6094             mergeFrom(parsedMessage);
6095           }
6096         }
6097         return this;
6098       }
6099 
6100       // @@protoc_insertion_point(builder_scope:DispatchMergingRegionsResponse)
6101     }
6102 
    // Eagerly create and initialize the shared default (singleton) instance.
    static {
      defaultInstance = new DispatchMergingRegionsResponse(true);
      defaultInstance.initFields();
    }
6107 
6108     // @@protoc_insertion_point(class_scope:DispatchMergingRegionsResponse)
6109   }
6110 
  /**
   * Read-only accessor contract shared by {@code AssignRegionRequest} and its Builder.
   */
  public interface AssignRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
  }
6128   /**
6129    * Protobuf type {@code AssignRegionRequest}
6130    */
6131   public static final class AssignRegionRequest extends
6132       com.google.protobuf.GeneratedMessage
6133       implements AssignRegionRequestOrBuilder {
    // Use AssignRegionRequest.newBuilder() to construct.
    private AssignRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only for the singleton default instance (no field init here;
    // the static initializer calls initFields() afterwards).
    private AssignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final AssignRegionRequest defaultInstance;
    public static AssignRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public AssignRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in the proto schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
AssignRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6156     private AssignRegionRequest(
6157         com.google.protobuf.CodedInputStream input,
6158         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6159         throws com.google.protobuf.InvalidProtocolBufferException {
6160       initFields();
6161       int mutable_bitField0_ = 0;
6162       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6163           com.google.protobuf.UnknownFieldSet.newBuilder();
6164       try {
6165         boolean done = false;
6166         while (!done) {
6167           int tag = input.readTag();
6168           switch (tag) {
6169             case 0:
6170               done = true;
6171               break;
6172             default: {
6173               if (!parseUnknownField(input, unknownFields,
6174                                      extensionRegistry, tag)) {
6175                 done = true;
6176               }
6177               break;
6178             }
6179             case 10: {
6180               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
6181               if (((bitField0_ & 0x00000001) == 0x00000001)) {
6182                 subBuilder = region_.toBuilder();
6183               }
6184               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
6185               if (subBuilder != null) {
6186                 subBuilder.mergeFrom(region_);
6187                 region_ = subBuilder.buildPartial();
6188               }
6189               bitField0_ |= 0x00000001;
6190               break;
6191             }
6192           }
6193         }
6194       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6195         throw e.setUnfinishedMessage(this);
6196       } catch (java.io.IOException e) {
6197         throw new com.google.protobuf.InvalidProtocolBufferException(
6198             e.getMessage()).setUnfinishedMessage(this);
6199       } finally {
6200         this.unknownFields = unknownFields.build();
6201         makeExtensionsImmutable();
6202       }
6203     }
    /** Descriptor for the {@code AssignRegionRequest} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.Builder.class);
    }
6215 
    // Stateless parser singleton; each parse call constructs a new message via
    // the CodedInputStream constructor above.
    public static com.google.protobuf.Parser<AssignRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<AssignRegionRequest>() {
      public AssignRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new AssignRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<AssignRegionRequest> getParserForType() {
      return PARSER;
    }
6230 
    // Presence bitmap: bit 0 tracks whether the required `region` field was set.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }
6253 
initFields()6254     private void initFields() {
6255       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6256     }
6257     private byte memoizedIsInitialized = -1;
isInitialized()6258     public final boolean isInitialized() {
6259       byte isInitialized = memoizedIsInitialized;
6260       if (isInitialized != -1) return isInitialized == 1;
6261 
6262       if (!hasRegion()) {
6263         memoizedIsInitialized = 0;
6264         return false;
6265       }
6266       if (!getRegion().isInitialized()) {
6267         memoizedIsInitialized = 0;
6268         return false;
6269       }
6270       memoizedIsInitialized = 1;
6271       return true;
6272     }
6273 
writeTo(com.google.protobuf.CodedOutputStream output)6274     public void writeTo(com.google.protobuf.CodedOutputStream output)
6275                         throws java.io.IOException {
6276       getSerializedSize();
6277       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6278         output.writeMessage(1, region_);
6279       }
6280       getUnknownFields().writeTo(output);
6281     }
6282 
6283     private int memoizedSerializedSize = -1;
getSerializedSize()6284     public int getSerializedSize() {
6285       int size = memoizedSerializedSize;
6286       if (size != -1) return size;
6287 
6288       size = 0;
6289       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6290         size += com.google.protobuf.CodedOutputStream
6291           .computeMessageSize(1, region_);
6292       }
6293       size += getUnknownFields().getSerializedSize();
6294       memoizedSerializedSize = size;
6295       return size;
6296     }
6297 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
6304 
    /** Structural equality: same presence and value of `region`, plus equal unknown fields. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
6325 
    // Cached hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
6342 
parseFrom( com.google.protobuf.ByteString data)6343     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(
6344         com.google.protobuf.ByteString data)
6345         throws com.google.protobuf.InvalidProtocolBufferException {
6346       return PARSER.parseFrom(data);
6347     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6348     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(
6349         com.google.protobuf.ByteString data,
6350         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6351         throws com.google.protobuf.InvalidProtocolBufferException {
6352       return PARSER.parseFrom(data, extensionRegistry);
6353     }
parseFrom(byte[] data)6354     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(byte[] data)
6355         throws com.google.protobuf.InvalidProtocolBufferException {
6356       return PARSER.parseFrom(data);
6357     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6358     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(
6359         byte[] data,
6360         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6361         throws com.google.protobuf.InvalidProtocolBufferException {
6362       return PARSER.parseFrom(data, extensionRegistry);
6363     }
parseFrom(java.io.InputStream input)6364     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(java.io.InputStream input)
6365         throws java.io.IOException {
6366       return PARSER.parseFrom(input);
6367     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6368     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(
6369         java.io.InputStream input,
6370         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6371         throws java.io.IOException {
6372       return PARSER.parseFrom(input, extensionRegistry);
6373     }
parseDelimitedFrom(java.io.InputStream input)6374     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseDelimitedFrom(java.io.InputStream input)
6375         throws java.io.IOException {
6376       return PARSER.parseDelimitedFrom(input);
6377     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6378     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseDelimitedFrom(
6379         java.io.InputStream input,
6380         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6381         throws java.io.IOException {
6382       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6383     }
parseFrom( com.google.protobuf.CodedInputStream input)6384     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(
6385         com.google.protobuf.CodedInputStream input)
6386         throws java.io.IOException {
6387       return PARSER.parseFrom(input);
6388     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6389     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parseFrom(
6390         com.google.protobuf.CodedInputStream input,
6391         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6392         throws java.io.IOException {
6393       return PARSER.parseFrom(input, extensionRegistry);
6394     }
6395 
newBuilder()6396     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()6397     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest prototype)6398     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest prototype) {
6399       return newBuilder().mergeFrom(prototype);
6400     }
toBuilder()6401     public Builder toBuilder() { return newBuilder(this); }
6402 
6403     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6404     protected Builder newBuilderForType(
6405         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6406       Builder builder = new Builder(parent);
6407       return builder;
6408     }
6409     /**
6410      * Protobuf type {@code AssignRegionRequest}
6411      */
6412     public static final class Builder extends
6413         com.google.protobuf.GeneratedMessage.Builder<Builder>
6414        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequestOrBuilder {
      /** Descriptor for {@code AssignRegionRequest}; shared with the message class. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.Builder.class);
      }
6426 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Pre-creates the nested field builder for `region` when the runtime is
        // configured to always use field builders.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
6445 
clear()6446       public Builder clear() {
6447         super.clear();
6448         if (regionBuilder_ == null) {
6449           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6450         } else {
6451           regionBuilder_.clear();
6452         }
6453         bitField0_ = (bitField0_ & ~0x00000001);
6454         return this;
6455       }
6456 
clone()6457       public Builder clone() {
6458         return create().mergeFrom(buildPartial());
6459       }
6460 
6461       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()6462           getDescriptorForType() {
6463         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionRequest_descriptor;
6464       }
6465 
getDefaultInstanceForType()6466       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest getDefaultInstanceForType() {
6467         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance();
6468       }
6469 
build()6470       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest build() {
6471         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest result = buildPartial();
6472         if (!result.isInitialized()) {
6473           throw newUninitializedMessageException(result);
6474         }
6475         return result;
6476       }
6477 
buildPartial()6478       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest buildPartial() {
6479         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest(this);
6480         int from_bitField0_ = bitField0_;
6481         int to_bitField0_ = 0;
6482         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6483           to_bitField0_ |= 0x00000001;
6484         }
6485         if (regionBuilder_ == null) {
6486           result.region_ = region_;
6487         } else {
6488           result.region_ = regionBuilder_.build();
6489         }
6490         result.bitField0_ = to_bitField0_;
6491         onBuilt();
6492         return result;
6493       }
6494 
mergeFrom(com.google.protobuf.Message other)6495       public Builder mergeFrom(com.google.protobuf.Message other) {
6496         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest) {
6497           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)other);
6498         } else {
6499           super.mergeFrom(other);
6500           return this;
6501         }
6502       }
6503 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest other)6504       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest other) {
6505         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance()) return this;
6506         if (other.hasRegion()) {
6507           mergeRegion(other.getRegion());
6508         }
6509         this.mergeUnknownFields(other.getUnknownFields());
6510         return this;
6511       }
6512 
isInitialized()6513       public final boolean isInitialized() {
6514         if (!hasRegion()) {
6515 
6516           return false;
6517         }
6518         if (!getRegion().isInitialized()) {
6519 
6520           return false;
6521         }
6522         return true;
6523       }
6524 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6525       public Builder mergeFrom(
6526           com.google.protobuf.CodedInputStream input,
6527           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6528           throws java.io.IOException {
6529         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest parsedMessage = null;
6530         try {
6531           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6532         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6533           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest) e.getUnfinishedMessage();
6534           throw e;
6535         } finally {
6536           if (parsedMessage != null) {
6537             mergeFrom(parsedMessage);
6538           }
6539         }
6540         return this;
6541       }
6542       private int bitField0_;
6543 
      // required .RegionSpecifier region = 1;
      // Plain value is used until a nested builder is requested; after that,
      // regionBuilder_ is authoritative (standard single-field-builder pattern).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
6564       /**
6565        * <code>required .RegionSpecifier region = 1;</code>
6566        */
setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)6567       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
6568         if (regionBuilder_ == null) {
6569           if (value == null) {
6570             throw new NullPointerException();
6571           }
6572           region_ = value;
6573           onChanged();
6574         } else {
6575           regionBuilder_.setMessage(value);
6576         }
6577         bitField0_ |= 0x00000001;
6578         return this;
6579       }
6580       /**
6581        * <code>required .RegionSpecifier region = 1;</code>
6582        */
setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)6583       public Builder setRegion(
6584           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
6585         if (regionBuilder_ == null) {
6586           region_ = builderForValue.build();
6587           onChanged();
6588         } else {
6589           regionBuilder_.setMessage(builderForValue.build());
6590         }
6591         bitField0_ |= 0x00000001;
6592         return this;
6593       }
6594       /**
6595        * <code>required .RegionSpecifier region = 1;</code>
6596        */
mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)6597       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
6598         if (regionBuilder_ == null) {
6599           if (((bitField0_ & 0x00000001) == 0x00000001) &&
6600               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
6601             region_ =
6602               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
6603           } else {
6604             region_ = value;
6605           }
6606           onChanged();
6607         } else {
6608           regionBuilder_.mergeFrom(value);
6609         }
6610         bitField0_ |= 0x00000001;
6611         return this;
6612       }
6613       /**
6614        * <code>required .RegionSpecifier region = 1;</code>
6615        */
clearRegion()6616       public Builder clearRegion() {
6617         if (regionBuilder_ == null) {
6618           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6619           onChanged();
6620         } else {
6621           regionBuilder_.clear();
6622         }
6623         bitField0_ = (bitField0_ & ~0x00000001);
6624         return this;
6625       }
6626       /**
6627        * <code>required .RegionSpecifier region = 1;</code>
6628        */
getRegionBuilder()6629       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
6630         bitField0_ |= 0x00000001;
6631         onChanged();
6632         return getRegionFieldBuilder().getBuilder();
6633       }
6634       /**
6635        * <code>required .RegionSpecifier region = 1;</code>
6636        */
getRegionOrBuilder()6637       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
6638         if (regionBuilder_ != null) {
6639           return regionBuilder_.getMessageOrBuilder();
6640         } else {
6641           return region_;
6642         }
6643       }
6644       /**
6645        * <code>required .RegionSpecifier region = 1;</code>
6646        */
6647       private com.google.protobuf.SingleFieldBuilder<
6648           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder()6649           getRegionFieldBuilder() {
6650         if (regionBuilder_ == null) {
6651           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6652               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
6653                   region_,
6654                   getParentForChildren(),
6655                   isClean());
6656           region_ = null;
6657         }
6658         return regionBuilder_;
6659       }
6660 
6661       // @@protoc_insertion_point(builder_scope:AssignRegionRequest)
6662     }
6663 
    // Eagerly create and initialize the singleton default instance for
    // AssignRegionRequest (the noInit constructor skips field setup, so
    // initFields() must be called explicitly here).
    static {
      defaultInstance = new AssignRegionRequest(true);
      defaultInstance.initFields();
    }
6668 
6669     // @@protoc_insertion_point(class_scope:AssignRegionRequest)
6670   }
6671 
  /**
   * Read interface for {@code AssignRegionResponse}.  The message declares
   * no fields, so this interface adds nothing beyond MessageOrBuilder.
   */
  public interface AssignRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
6675   /**
6676    * Protobuf type {@code AssignRegionResponse}
6677    */
6678   public static final class AssignRegionResponse extends
6679       com.google.protobuf.GeneratedMessage
6680       implements AssignRegionResponseOrBuilder {
6681     // Use AssignRegionResponse.newBuilder() to construct.
AssignRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)6682     private AssignRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6683       super(builder);
6684       this.unknownFields = builder.getUnknownFields();
6685     }
AssignRegionResponse(boolean noInit)6686     private AssignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6687 
6688     private static final AssignRegionResponse defaultInstance;
getDefaultInstance()6689     public static AssignRegionResponse getDefaultInstance() {
6690       return defaultInstance;
6691     }
6692 
getDefaultInstanceForType()6693     public AssignRegionResponse getDefaultInstanceForType() {
6694       return defaultInstance;
6695     }
6696 
6697     private final com.google.protobuf.UnknownFieldSet unknownFields;
6698     @java.lang.Override
6699     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()6700         getUnknownFields() {
6701       return this.unknownFields;
6702     }
AssignRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6703     private AssignRegionResponse(
6704         com.google.protobuf.CodedInputStream input,
6705         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6706         throws com.google.protobuf.InvalidProtocolBufferException {
6707       initFields();
6708       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6709           com.google.protobuf.UnknownFieldSet.newBuilder();
6710       try {
6711         boolean done = false;
6712         while (!done) {
6713           int tag = input.readTag();
6714           switch (tag) {
6715             case 0:
6716               done = true;
6717               break;
6718             default: {
6719               if (!parseUnknownField(input, unknownFields,
6720                                      extensionRegistry, tag)) {
6721                 done = true;
6722               }
6723               break;
6724             }
6725           }
6726         }
6727       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6728         throw e.setUnfinishedMessage(this);
6729       } catch (java.io.IOException e) {
6730         throw new com.google.protobuf.InvalidProtocolBufferException(
6731             e.getMessage()).setUnfinishedMessage(this);
6732       } finally {
6733         this.unknownFields = unknownFields.build();
6734         makeExtensionsImmutable();
6735       }
6736     }
6737     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()6738         getDescriptor() {
6739       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_descriptor;
6740     }
6741 
6742     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6743         internalGetFieldAccessorTable() {
6744       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_fieldAccessorTable
6745           .ensureFieldAccessorsInitialized(
6746               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.Builder.class);
6747     }
6748 
6749     public static com.google.protobuf.Parser<AssignRegionResponse> PARSER =
6750         new com.google.protobuf.AbstractParser<AssignRegionResponse>() {
6751       public AssignRegionResponse parsePartialFrom(
6752           com.google.protobuf.CodedInputStream input,
6753           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6754           throws com.google.protobuf.InvalidProtocolBufferException {
6755         return new AssignRegionResponse(input, extensionRegistry);
6756       }
6757     };
6758 
6759     @java.lang.Override
getParserForType()6760     public com.google.protobuf.Parser<AssignRegionResponse> getParserForType() {
6761       return PARSER;
6762     }
6763 
initFields()6764     private void initFields() {
6765     }
6766     private byte memoizedIsInitialized = -1;
isInitialized()6767     public final boolean isInitialized() {
6768       byte isInitialized = memoizedIsInitialized;
6769       if (isInitialized != -1) return isInitialized == 1;
6770 
6771       memoizedIsInitialized = 1;
6772       return true;
6773     }
6774 
writeTo(com.google.protobuf.CodedOutputStream output)6775     public void writeTo(com.google.protobuf.CodedOutputStream output)
6776                         throws java.io.IOException {
6777       getSerializedSize();
6778       getUnknownFields().writeTo(output);
6779     }
6780 
6781     private int memoizedSerializedSize = -1;
getSerializedSize()6782     public int getSerializedSize() {
6783       int size = memoizedSerializedSize;
6784       if (size != -1) return size;
6785 
6786       size = 0;
6787       size += getUnknownFields().getSerializedSize();
6788       memoizedSerializedSize = size;
6789       return size;
6790     }
6791 
6792     private static final long serialVersionUID = 0L;
6793     @java.lang.Override
writeReplace()6794     protected java.lang.Object writeReplace()
6795         throws java.io.ObjectStreamException {
6796       return super.writeReplace();
6797     }
6798 
6799     @java.lang.Override
equals(final java.lang.Object obj)6800     public boolean equals(final java.lang.Object obj) {
6801       if (obj == this) {
6802        return true;
6803       }
6804       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse)) {
6805         return super.equals(obj);
6806       }
6807       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) obj;
6808 
6809       boolean result = true;
6810       result = result &&
6811           getUnknownFields().equals(other.getUnknownFields());
6812       return result;
6813     }
6814 
6815     private int memoizedHashCode = 0;
6816     @java.lang.Override
hashCode()6817     public int hashCode() {
6818       if (memoizedHashCode != 0) {
6819         return memoizedHashCode;
6820       }
6821       int hash = 41;
6822       hash = (19 * hash) + getDescriptorForType().hashCode();
6823       hash = (29 * hash) + getUnknownFields().hashCode();
6824       memoizedHashCode = hash;
6825       return hash;
6826     }
6827 
parseFrom( com.google.protobuf.ByteString data)6828     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(
6829         com.google.protobuf.ByteString data)
6830         throws com.google.protobuf.InvalidProtocolBufferException {
6831       return PARSER.parseFrom(data);
6832     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6833     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(
6834         com.google.protobuf.ByteString data,
6835         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6836         throws com.google.protobuf.InvalidProtocolBufferException {
6837       return PARSER.parseFrom(data, extensionRegistry);
6838     }
parseFrom(byte[] data)6839     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(byte[] data)
6840         throws com.google.protobuf.InvalidProtocolBufferException {
6841       return PARSER.parseFrom(data);
6842     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6843     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(
6844         byte[] data,
6845         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6846         throws com.google.protobuf.InvalidProtocolBufferException {
6847       return PARSER.parseFrom(data, extensionRegistry);
6848     }
parseFrom(java.io.InputStream input)6849     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(java.io.InputStream input)
6850         throws java.io.IOException {
6851       return PARSER.parseFrom(input);
6852     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6853     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(
6854         java.io.InputStream input,
6855         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6856         throws java.io.IOException {
6857       return PARSER.parseFrom(input, extensionRegistry);
6858     }
parseDelimitedFrom(java.io.InputStream input)6859     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseDelimitedFrom(java.io.InputStream input)
6860         throws java.io.IOException {
6861       return PARSER.parseDelimitedFrom(input);
6862     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6863     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseDelimitedFrom(
6864         java.io.InputStream input,
6865         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6866         throws java.io.IOException {
6867       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6868     }
parseFrom( com.google.protobuf.CodedInputStream input)6869     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(
6870         com.google.protobuf.CodedInputStream input)
6871         throws java.io.IOException {
6872       return PARSER.parseFrom(input);
6873     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6874     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parseFrom(
6875         com.google.protobuf.CodedInputStream input,
6876         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6877         throws java.io.IOException {
6878       return PARSER.parseFrom(input, extensionRegistry);
6879     }
6880 
newBuilder()6881     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()6882     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse prototype)6883     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse prototype) {
6884       return newBuilder().mergeFrom(prototype);
6885     }
toBuilder()6886     public Builder toBuilder() { return newBuilder(this); }
6887 
6888     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6889     protected Builder newBuilderForType(
6890         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6891       Builder builder = new Builder(parent);
6892       return builder;
6893     }
6894     /**
6895      * Protobuf type {@code AssignRegionResponse}
6896      */
6897     public static final class Builder extends
6898         com.google.protobuf.GeneratedMessage.Builder<Builder>
6899        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponseOrBuilder {
6900       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()6901           getDescriptor() {
6902         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_descriptor;
6903       }
6904 
6905       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6906           internalGetFieldAccessorTable() {
6907         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_fieldAccessorTable
6908             .ensureFieldAccessorsInitialized(
6909                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.Builder.class);
6910       }
6911 
6912       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.newBuilder()
Builder()6913       private Builder() {
6914         maybeForceBuilderInitialization();
6915       }
6916 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6917       private Builder(
6918           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6919         super(parent);
6920         maybeForceBuilderInitialization();
6921       }
maybeForceBuilderInitialization()6922       private void maybeForceBuilderInitialization() {
6923         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6924         }
6925       }
create()6926       private static Builder create() {
6927         return new Builder();
6928       }
6929 
clear()6930       public Builder clear() {
6931         super.clear();
6932         return this;
6933       }
6934 
clone()6935       public Builder clone() {
6936         return create().mergeFrom(buildPartial());
6937       }
6938 
6939       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()6940           getDescriptorForType() {
6941         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AssignRegionResponse_descriptor;
6942       }
6943 
getDefaultInstanceForType()6944       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse getDefaultInstanceForType() {
6945         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance();
6946       }
6947 
build()6948       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse build() {
6949         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse result = buildPartial();
6950         if (!result.isInitialized()) {
6951           throw newUninitializedMessageException(result);
6952         }
6953         return result;
6954       }
6955 
buildPartial()6956       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse buildPartial() {
6957         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse(this);
6958         onBuilt();
6959         return result;
6960       }
6961 
mergeFrom(com.google.protobuf.Message other)6962       public Builder mergeFrom(com.google.protobuf.Message other) {
6963         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) {
6964           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse)other);
6965         } else {
6966           super.mergeFrom(other);
6967           return this;
6968         }
6969       }
6970 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse other)6971       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse other) {
6972         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance()) return this;
6973         this.mergeUnknownFields(other.getUnknownFields());
6974         return this;
6975       }
6976 
isInitialized()6977       public final boolean isInitialized() {
6978         return true;
6979       }
6980 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6981       public Builder mergeFrom(
6982           com.google.protobuf.CodedInputStream input,
6983           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6984           throws java.io.IOException {
6985         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse parsedMessage = null;
6986         try {
6987           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6988         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6989           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) e.getUnfinishedMessage();
6990           throw e;
6991         } finally {
6992           if (parsedMessage != null) {
6993             mergeFrom(parsedMessage);
6994           }
6995         }
6996         return this;
6997       }
6998 
6999       // @@protoc_insertion_point(builder_scope:AssignRegionResponse)
7000     }
7001 
7002     static {
7003       defaultInstance = new AssignRegionResponse(true);
defaultInstance.initFields()7004       defaultInstance.initFields();
7005     }
7006 
7007     // @@protoc_insertion_point(class_scope:AssignRegionResponse)
7008   }
7009 
  /**
   * Read interface for {@code UnassignRegionRequest}: presence checks and
   * getters for the required region field and the optional force flag.
   */
  public interface UnassignRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

    // optional bool force = 2 [default = false];
    /**
     * <code>optional bool force = 2 [default = false];</code>
     */
    boolean hasForce();
    /**
     * <code>optional bool force = 2 [default = false];</code>
     */
    boolean getForce();
  }
7037   /**
7038    * Protobuf type {@code UnassignRegionRequest}
7039    */
7040   public static final class UnassignRegionRequest extends
7041       com.google.protobuf.GeneratedMessage
7042       implements UnassignRegionRequestOrBuilder {
    // Use UnassignRegionRequest.newBuilder() to construct.
    private UnassignRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer for the singleton default
    // instance; fields are populated afterwards via initFields().
    private UnassignRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7049 
    // Singleton default instance, assigned in the class's static initializer.
    private static final UnassignRegionRequest defaultInstance;
    public static UnassignRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public UnassignRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
7058 
    // Wire fields that did not match any declared field number.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
UnassignRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7065     private UnassignRegionRequest(
7066         com.google.protobuf.CodedInputStream input,
7067         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7068         throws com.google.protobuf.InvalidProtocolBufferException {
7069       initFields();
7070       int mutable_bitField0_ = 0;
7071       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7072           com.google.protobuf.UnknownFieldSet.newBuilder();
7073       try {
7074         boolean done = false;
7075         while (!done) {
7076           int tag = input.readTag();
7077           switch (tag) {
7078             case 0:
7079               done = true;
7080               break;
7081             default: {
7082               if (!parseUnknownField(input, unknownFields,
7083                                      extensionRegistry, tag)) {
7084                 done = true;
7085               }
7086               break;
7087             }
7088             case 10: {
7089               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
7090               if (((bitField0_ & 0x00000001) == 0x00000001)) {
7091                 subBuilder = region_.toBuilder();
7092               }
7093               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
7094               if (subBuilder != null) {
7095                 subBuilder.mergeFrom(region_);
7096                 region_ = subBuilder.buildPartial();
7097               }
7098               bitField0_ |= 0x00000001;
7099               break;
7100             }
7101             case 16: {
7102               bitField0_ |= 0x00000002;
7103               force_ = input.readBool();
7104               break;
7105             }
7106           }
7107         }
7108       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7109         throw e.setUnfinishedMessage(this);
7110       } catch (java.io.IOException e) {
7111         throw new com.google.protobuf.InvalidProtocolBufferException(
7112             e.getMessage()).setUnfinishedMessage(this);
7113       } finally {
7114         this.unknownFields = unknownFields.build();
7115         makeExtensionsImmutable();
7116       }
7117     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.Builder.class);
    }

    // Parser instance backing the parseFrom/parseDelimitedFrom helpers
    // below.  Public and non-final by protoc 2.5 generated-code convention.
    public static com.google.protobuf.Parser<UnassignRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<UnassignRegionRequest>() {
      public UnassignRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new UnassignRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<UnassignRegionRequest> getParserForType() {
      return PARSER;
    }
7144 
    // Presence bits: bit 0 = region, bit 1 = force.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // optional bool force = 2 [default = false];
    public static final int FORCE_FIELD_NUMBER = 2;
    private boolean force_;
    /**
     * <code>optional bool force = 2 [default = false];</code>
     */
    public boolean hasForce() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool force = 2 [default = false];</code>
     */
    public boolean getForce() {
      return force_;
    }
7183 
initFields()7184     private void initFields() {
7185       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
7186       force_ = false;
7187     }
7188     private byte memoizedIsInitialized = -1;
isInitialized()7189     public final boolean isInitialized() {
7190       byte isInitialized = memoizedIsInitialized;
7191       if (isInitialized != -1) return isInitialized == 1;
7192 
7193       if (!hasRegion()) {
7194         memoizedIsInitialized = 0;
7195         return false;
7196       }
7197       if (!getRegion().isInitialized()) {
7198         memoizedIsInitialized = 0;
7199         return false;
7200       }
7201       memoizedIsInitialized = 1;
7202       return true;
7203     }
7204 
writeTo(com.google.protobuf.CodedOutputStream output)7205     public void writeTo(com.google.protobuf.CodedOutputStream output)
7206                         throws java.io.IOException {
7207       getSerializedSize();
7208       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7209         output.writeMessage(1, region_);
7210       }
7211       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7212         output.writeBool(2, force_);
7213       }
7214       getUnknownFields().writeTo(output);
7215     }
7216 
7217     private int memoizedSerializedSize = -1;
getSerializedSize()7218     public int getSerializedSize() {
7219       int size = memoizedSerializedSize;
7220       if (size != -1) return size;
7221 
7222       size = 0;
7223       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7224         size += com.google.protobuf.CodedOutputStream
7225           .computeMessageSize(1, region_);
7226       }
7227       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7228         size += com.google.protobuf.CodedOutputStream
7229           .computeBoolSize(2, force_);
7230       }
7231       size += getUnknownFields().getSerializedSize();
7232       memoizedSerializedSize = size;
7233       return size;
7234     }
7235 
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to GeneratedMessage's proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
7242 
    // Value equality: presence bits, field values, and unknown fields must
    // all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasForce() == other.hasForce());
      if (hasForce()) {
        result = result && (getForce()
            == other.getForce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    // Hash consistent with equals(); memoized (0 means "not computed yet").
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasForce()) {
        hash = (37 * hash) + FORCE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getForce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7289 
parseFrom( com.google.protobuf.ByteString data)7290     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(
7291         com.google.protobuf.ByteString data)
7292         throws com.google.protobuf.InvalidProtocolBufferException {
7293       return PARSER.parseFrom(data);
7294     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7295     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(
7296         com.google.protobuf.ByteString data,
7297         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7298         throws com.google.protobuf.InvalidProtocolBufferException {
7299       return PARSER.parseFrom(data, extensionRegistry);
7300     }
parseFrom(byte[] data)7301     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(byte[] data)
7302         throws com.google.protobuf.InvalidProtocolBufferException {
7303       return PARSER.parseFrom(data);
7304     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7305     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(
7306         byte[] data,
7307         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7308         throws com.google.protobuf.InvalidProtocolBufferException {
7309       return PARSER.parseFrom(data, extensionRegistry);
7310     }
parseFrom(java.io.InputStream input)7311     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(java.io.InputStream input)
7312         throws java.io.IOException {
7313       return PARSER.parseFrom(input);
7314     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7315     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(
7316         java.io.InputStream input,
7317         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7318         throws java.io.IOException {
7319       return PARSER.parseFrom(input, extensionRegistry);
7320     }
parseDelimitedFrom(java.io.InputStream input)7321     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseDelimitedFrom(java.io.InputStream input)
7322         throws java.io.IOException {
7323       return PARSER.parseDelimitedFrom(input);
7324     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7325     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseDelimitedFrom(
7326         java.io.InputStream input,
7327         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7328         throws java.io.IOException {
7329       return PARSER.parseDelimitedFrom(input, extensionRegistry);
7330     }
parseFrom( com.google.protobuf.CodedInputStream input)7331     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(
7332         com.google.protobuf.CodedInputStream input)
7333         throws java.io.IOException {
7334       return PARSER.parseFrom(input);
7335     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7336     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parseFrom(
7337         com.google.protobuf.CodedInputStream input,
7338         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7339         throws java.io.IOException {
7340       return PARSER.parseFrom(input, extensionRegistry);
7341     }
7342 
    // Builder factory methods: a fresh builder, a builder pre-populated from a
    // prototype message, and the parent-aware variant used internally by
    // GeneratedMessage for nested-builder wiring.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code UnassignRegionRequest}
     *
     * <p>Builder for the request message. Field presence is tracked in
     * {@code bitField0_}: bit 0 = {@code region}, bit 1 = {@code force}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested region field builder when the runtime is
      // configured to always use field builders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets both fields to their defaults and clears their has-bits.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        force_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message, transferring the has-bits
      // from the builder's bitField0_ into the message's.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.force_ = force_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields that are set on `other`; unset fields on
      // `other` leave this builder untouched.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasForce()) {
          setForce(other.getForce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // `region` is a required message field: it must be present and itself
      // fully initialized for this builder to produce a valid message.
      public final boolean isInitialized() {
        if (!hasRegion()) {

          return false;
        }
        if (!getRegion().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from the wire and merges into this builder. On a parse error,
      // any partially parsed message is still merged (see finally block) so the
      // caller can inspect what was read before the failure.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      // When regionBuilder_ is non-null it owns the field's state and region_
      // is ignored (standard SingleFieldBuilder handoff).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Merges field-by-field when a region is already set; otherwise simply
       * adopts {@code value}.
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Marks the field as set, since handing out a mutable builder implies
       * the caller intends to populate it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * Lazily creates the SingleFieldBuilder; once created, ownership of the
       * field's state moves from region_ to the builder.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // optional bool force = 2 [default = false];
      private boolean force_ ;
      /**
       * <code>optional bool force = 2 [default = false];</code>
       */
      public boolean hasForce() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool force = 2 [default = false];</code>
       */
      public boolean getForce() {
        return force_;
      }
      /**
       * <code>optional bool force = 2 [default = false];</code>
       */
      public Builder setForce(boolean value) {
        bitField0_ |= 0x00000002;
        force_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool force = 2 [default = false];</code>
       */
      public Builder clearForce() {
        bitField0_ = (bitField0_ & ~0x00000002);
        force_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:UnassignRegionRequest)
    }
7652 
    static {
      // Create the singleton default instance without touching field builders
      // (noInit=true) and initialize its fields to their defaults.
      defaultInstance = new UnassignRegionRequest(true);
      defaultInstance.initFields();
    }
7657 
7658     // @@protoc_insertion_point(class_scope:UnassignRegionRequest)
7659   }
7660 
  // Read-only accessor interface for UnassignRegionResponse. The message has
  // no fields, so the interface adds nothing beyond MessageOrBuilder.
  public interface UnassignRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
7664   /**
7665    * Protobuf type {@code UnassignRegionResponse}
7666    */
7667   public static final class UnassignRegionResponse extends
7668       com.google.protobuf.GeneratedMessage
7669       implements UnassignRegionResponseOrBuilder {
    // Use UnassignRegionResponse.newBuilder() to construct.
    private UnassignRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the static defaultInstance.
    private UnassignRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7676 
    // Singleton default instance, created in the class's static initializer.
    private static final UnassignRegionResponse defaultInstance;
    public static UnassignRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public UnassignRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that this message's schema does not define.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. The message declares no fields, so
    // every non-zero tag is routed into the unknown-field set; tag 0 marks
    // end of input.
    private UnassignRegionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially built message so callers can recover it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / reflection plumbing generated by protoc.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.Builder.class);
    }
7737 
    // Message-level parser used by all static parseFrom variants; delegates
    // straight to the parsing constructor.
    public static com.google.protobuf.Parser<UnassignRegionResponse> PARSER =
        new com.google.protobuf.AbstractParser<UnassignRegionResponse>() {
      public UnassignRegionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new UnassignRegionResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<UnassignRegionResponse> getParserForType() {
      return PARSER;
    }
7752 
    // No fields to initialize for this empty message.
    private void initFields() {
    }
    // Cached initialization check: -1 = not computed, 1 = initialized,
    // 0 = not initialized. With no required fields it is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
7763 
    // Serializes the message: with no declared fields, only unknown fields
    // (if any) are written. getSerializedSize() is called first to populate
    // memoized sizes, matching generated-code convention.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means "not computed yet".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
7780 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
7787 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      // Empty message: two instances are equal iff their unknown fields match.
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
7803 
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard generated hash over the descriptor and unknown fields only,
      // since this message declares no fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
7816 
    // Static parsing entry points for UnassignRegionResponse; all delegate to
    // PARSER. The "delimited" variants read a varint length prefix first.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
7869 
    // Builder factory methods: fresh builder, builder seeded from a prototype,
    // and the parent-aware variant used internally by GeneratedMessage.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
7883     /**
7884      * Protobuf type {@code UnassignRegionResponse}
7885      */
7886     public static final class Builder extends
7887         com.google.protobuf.GeneratedMessage.Builder<Builder>
7888        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponseOrBuilder {
      // Descriptor / reflection plumbing for the response builder.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.Builder.class);
      }
7900 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested message fields, so nothing to initialize even when field
      // builders are forced on.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
7918 
clear()7919       public Builder clear() {
7920         super.clear();
7921         return this;
7922       }
7923 
clone()7924       public Builder clone() {
7925         return create().mergeFrom(buildPartial());
7926       }
7927 
7928       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()7929           getDescriptorForType() {
7930         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_UnassignRegionResponse_descriptor;
7931       }
7932 
getDefaultInstanceForType()7933       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse getDefaultInstanceForType() {
7934         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance();
7935       }
7936 
build()7937       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse build() {
7938         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse result = buildPartial();
7939         if (!result.isInitialized()) {
7940           throw newUninitializedMessageException(result);
7941         }
7942         return result;
7943       }
7944 
buildPartial()7945       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse buildPartial() {
7946         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse(this);
7947         onBuilt();
7948         return result;
7949       }
7950 
mergeFrom(com.google.protobuf.Message other)7951       public Builder mergeFrom(com.google.protobuf.Message other) {
7952         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) {
7953           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse)other);
7954         } else {
7955           super.mergeFrom(other);
7956           return this;
7957         }
7958       }
7959 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse other)7960       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse other) {
7961         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance()) return this;
7962         this.mergeUnknownFields(other.getUnknownFields());
7963         return this;
7964       }
7965 
isInitialized()7966       public final boolean isInitialized() {
7967         return true;
7968       }
7969 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7970       public Builder mergeFrom(
7971           com.google.protobuf.CodedInputStream input,
7972           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7973           throws java.io.IOException {
7974         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse parsedMessage = null;
7975         try {
7976           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7977         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7978           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) e.getUnfinishedMessage();
7979           throw e;
7980         } finally {
7981           if (parsedMessage != null) {
7982             mergeFrom(parsedMessage);
7983           }
7984         }
7985         return this;
7986       }
7987 
7988       // @@protoc_insertion_point(builder_scope:UnassignRegionResponse)
7989     }
7990 
    // Eagerly create and initialize the shared default instance.
    static {
      defaultInstance = new UnassignRegionResponse(true);
      defaultInstance.initFields();
    }
7995 
7996     // @@protoc_insertion_point(class_scope:UnassignRegionResponse)
7997   }
7998 
  /**
   * Accessor interface for {@code OfflineRegionRequest}, implemented by both
   * the immutable message and its builder.
   */
  public interface OfflineRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
  }
  /**
   * Protobuf type {@code OfflineRegionRequest}
   *
   * <p>Immutable message with a single required field,
   * {@code .RegionSpecifier region = 1}, naming the region to act on.
   * Generated by protoc from Master.proto; do not edit by hand.
   */
  public static final class OfflineRegionRequest extends
      com.google.protobuf.GeneratedMessage
      implements OfflineRegionRequestOrBuilder {
    // Use OfflineRegionRequest.newBuilder() to construct.
    private OfflineRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance; fields stay unset.
    private OfflineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final OfflineRegionRequest defaultInstance;
    /** Returns the shared immutable default instance (all fields unset). */
    public static OfflineRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public OfflineRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: consumes tag/value pairs until tag 0
     * (end of stream), preserving unrecognized fields in {@code unknownFields}.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on malformed
     *         input; the partially-parsed message is attached to the exception.
     */
    private OfflineRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0; // generated; unused for this message
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: { // field 1 (region), wire type 2 (length-delimited)
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                // Field repeated on the wire: merge into the previous value.
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.Builder.class);
    }

    // Parser used by the parseFrom(...) overloads and Builder.mergeFrom(CodedInputStream).
    public static com.google.protobuf.Parser<OfflineRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<OfflineRegionRequest>() {
      public OfflineRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new OfflineRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<OfflineRegionRequest> getParserForType() {
      return PARSER;
    }

    // Bit 0 tracks presence of the required 'region' field.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
    }
    // Memoized isInitialized() result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'region' is required and must itself be fully initialized.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized byte size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a new builder pre-populated from this message. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code OfflineRegionRequest}
     *
     * <p>Mutable builder; obtain via {@link OfflineRegionRequest#newBuilder()}.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates the nested field builder when the runtime requests it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance();
      }

      // Builds and throws if the required 'region' field is missing/uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // 'region' is required and must itself be initialized.
      public final boolean isInitialized() {
        if (!hasRegion()) {

          return false;
        }
        if (!getRegion().isInitialized()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge whatever was successfully parsed, even on failure.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      // Non-null once nested-builder mode is active; region_ is then unused (set to null).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Merge into an existing non-default value; otherwise just adopt it.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       *
       * <p>Lazily switches this builder into nested-builder mode for 'region'.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:OfflineRegionRequest)
    }

    // Eagerly create and initialize the shared default instance.
    static {
      defaultInstance = new OfflineRegionRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:OfflineRegionRequest)
  }
8559 
  /**
   * Accessor interface for {@code OfflineRegionResponse}; the message declares
   * no fields, so it adds nothing beyond {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface OfflineRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
8563   /**
8564    * Protobuf type {@code OfflineRegionResponse}
8565    */
8566   public static final class OfflineRegionResponse extends
8567       com.google.protobuf.GeneratedMessage
8568       implements OfflineRegionResponseOrBuilder {
8569     // Use OfflineRegionResponse.newBuilder() to construct.
OfflineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)8570     private OfflineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8571       super(builder);
8572       this.unknownFields = builder.getUnknownFields();
8573     }
OfflineRegionResponse(boolean noInit)8574     private OfflineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8575 
8576     private static final OfflineRegionResponse defaultInstance;
getDefaultInstance()8577     public static OfflineRegionResponse getDefaultInstance() {
8578       return defaultInstance;
8579     }
8580 
getDefaultInstanceForType()8581     public OfflineRegionResponse getDefaultInstanceForType() {
8582       return defaultInstance;
8583     }
8584 
8585     private final com.google.protobuf.UnknownFieldSet unknownFields;
8586     @java.lang.Override
8587     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()8588         getUnknownFields() {
8589       return this.unknownFields;
8590     }
OfflineRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8591     private OfflineRegionResponse(
8592         com.google.protobuf.CodedInputStream input,
8593         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8594         throws com.google.protobuf.InvalidProtocolBufferException {
8595       initFields();
8596       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8597           com.google.protobuf.UnknownFieldSet.newBuilder();
8598       try {
8599         boolean done = false;
8600         while (!done) {
8601           int tag = input.readTag();
8602           switch (tag) {
8603             case 0:
8604               done = true;
8605               break;
8606             default: {
8607               if (!parseUnknownField(input, unknownFields,
8608                                      extensionRegistry, tag)) {
8609                 done = true;
8610               }
8611               break;
8612             }
8613           }
8614         }
8615       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8616         throw e.setUnfinishedMessage(this);
8617       } catch (java.io.IOException e) {
8618         throw new com.google.protobuf.InvalidProtocolBufferException(
8619             e.getMessage()).setUnfinishedMessage(this);
8620       } finally {
8621         this.unknownFields = unknownFields.build();
8622         makeExtensionsImmutable();
8623       }
8624     }
8625     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()8626         getDescriptor() {
8627       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor;
8628     }
8629 
8630     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()8631         internalGetFieldAccessorTable() {
8632       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_fieldAccessorTable
8633           .ensureFieldAccessorsInitialized(
8634               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.Builder.class);
8635     }
8636 
8637     public static com.google.protobuf.Parser<OfflineRegionResponse> PARSER =
8638         new com.google.protobuf.AbstractParser<OfflineRegionResponse>() {
8639       public OfflineRegionResponse parsePartialFrom(
8640           com.google.protobuf.CodedInputStream input,
8641           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8642           throws com.google.protobuf.InvalidProtocolBufferException {
8643         return new OfflineRegionResponse(input, extensionRegistry);
8644       }
8645     };
8646 
8647     @java.lang.Override
getParserForType()8648     public com.google.protobuf.Parser<OfflineRegionResponse> getParserForType() {
8649       return PARSER;
8650     }
8651 
initFields()8652     private void initFields() {
8653     }
8654     private byte memoizedIsInitialized = -1;
isInitialized()8655     public final boolean isInitialized() {
8656       byte isInitialized = memoizedIsInitialized;
8657       if (isInitialized != -1) return isInitialized == 1;
8658 
8659       memoizedIsInitialized = 1;
8660       return true;
8661     }
8662 
writeTo(com.google.protobuf.CodedOutputStream output)8663     public void writeTo(com.google.protobuf.CodedOutputStream output)
8664                         throws java.io.IOException {
8665       getSerializedSize();
8666       getUnknownFields().writeTo(output);
8667     }
8668 
8669     private int memoizedSerializedSize = -1;
getSerializedSize()8670     public int getSerializedSize() {
8671       int size = memoizedSerializedSize;
8672       if (size != -1) return size;
8673 
8674       size = 0;
8675       size += getUnknownFields().getSerializedSize();
8676       memoizedSerializedSize = size;
8677       return size;
8678     }
8679 
8680     private static final long serialVersionUID = 0L;
8681     @java.lang.Override
writeReplace()8682     protected java.lang.Object writeReplace()
8683         throws java.io.ObjectStreamException {
8684       return super.writeReplace();
8685     }
8686 
8687     @java.lang.Override
equals(final java.lang.Object obj)8688     public boolean equals(final java.lang.Object obj) {
8689       if (obj == this) {
8690        return true;
8691       }
8692       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse)) {
8693         return super.equals(obj);
8694       }
8695       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) obj;
8696 
8697       boolean result = true;
8698       result = result &&
8699           getUnknownFields().equals(other.getUnknownFields());
8700       return result;
8701     }
8702 
8703     private int memoizedHashCode = 0;
8704     @java.lang.Override
hashCode()8705     public int hashCode() {
8706       if (memoizedHashCode != 0) {
8707         return memoizedHashCode;
8708       }
8709       int hash = 41;
8710       hash = (19 * hash) + getDescriptorForType().hashCode();
8711       hash = (29 * hash) + getUnknownFields().hashCode();
8712       memoizedHashCode = hash;
8713       return hash;
8714     }
8715 
parseFrom( com.google.protobuf.ByteString data)8716     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(
8717         com.google.protobuf.ByteString data)
8718         throws com.google.protobuf.InvalidProtocolBufferException {
8719       return PARSER.parseFrom(data);
8720     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8721     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(
8722         com.google.protobuf.ByteString data,
8723         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8724         throws com.google.protobuf.InvalidProtocolBufferException {
8725       return PARSER.parseFrom(data, extensionRegistry);
8726     }
parseFrom(byte[] data)8727     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(byte[] data)
8728         throws com.google.protobuf.InvalidProtocolBufferException {
8729       return PARSER.parseFrom(data);
8730     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8731     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(
8732         byte[] data,
8733         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8734         throws com.google.protobuf.InvalidProtocolBufferException {
8735       return PARSER.parseFrom(data, extensionRegistry);
8736     }
parseFrom(java.io.InputStream input)8737     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(java.io.InputStream input)
8738         throws java.io.IOException {
8739       return PARSER.parseFrom(input);
8740     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8741     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(
8742         java.io.InputStream input,
8743         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8744         throws java.io.IOException {
8745       return PARSER.parseFrom(input, extensionRegistry);
8746     }
parseDelimitedFrom(java.io.InputStream input)8747     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom(java.io.InputStream input)
8748         throws java.io.IOException {
8749       return PARSER.parseDelimitedFrom(input);
8750     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8751     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseDelimitedFrom(
8752         java.io.InputStream input,
8753         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8754         throws java.io.IOException {
8755       return PARSER.parseDelimitedFrom(input, extensionRegistry);
8756     }
parseFrom( com.google.protobuf.CodedInputStream input)8757     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(
8758         com.google.protobuf.CodedInputStream input)
8759         throws java.io.IOException {
8760       return PARSER.parseFrom(input);
8761     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8762     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parseFrom(
8763         com.google.protobuf.CodedInputStream input,
8764         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8765         throws java.io.IOException {
8766       return PARSER.parseFrom(input, extensionRegistry);
8767     }
8768 
newBuilder()8769     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()8770     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse prototype)8771     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse prototype) {
8772       return newBuilder().mergeFrom(prototype);
8773     }
toBuilder()8774     public Builder toBuilder() { return newBuilder(this); }
8775 
8776     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8777     protected Builder newBuilderForType(
8778         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8779       Builder builder = new Builder(parent);
8780       return builder;
8781     }
8782     /**
8783      * Protobuf type {@code OfflineRegionResponse}
8784      */
8785     public static final class Builder extends
8786         com.google.protobuf.GeneratedMessage.Builder<Builder>
8787        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponseOrBuilder {
8788       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()8789           getDescriptor() {
8790         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor;
8791       }
8792 
8793       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()8794           internalGetFieldAccessorTable() {
8795         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_fieldAccessorTable
8796             .ensureFieldAccessorsInitialized(
8797                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.Builder.class);
8798       }
8799 
8800       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.newBuilder()
Builder()8801       private Builder() {
8802         maybeForceBuilderInitialization();
8803       }
8804 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8805       private Builder(
8806           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8807         super(parent);
8808         maybeForceBuilderInitialization();
8809       }
maybeForceBuilderInitialization()8810       private void maybeForceBuilderInitialization() {
8811         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8812         }
8813       }
create()8814       private static Builder create() {
8815         return new Builder();
8816       }
8817 
clear()8818       public Builder clear() {
8819         super.clear();
8820         return this;
8821       }
8822 
clone()8823       public Builder clone() {
8824         return create().mergeFrom(buildPartial());
8825       }
8826 
8827       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()8828           getDescriptorForType() {
8829         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_OfflineRegionResponse_descriptor;
8830       }
8831 
getDefaultInstanceForType()8832       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse getDefaultInstanceForType() {
8833         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance();
8834       }
8835 
build()8836       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse build() {
8837         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = buildPartial();
8838         if (!result.isInitialized()) {
8839           throw newUninitializedMessageException(result);
8840         }
8841         return result;
8842       }
8843 
buildPartial()8844       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse buildPartial() {
8845         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse(this);
8846         onBuilt();
8847         return result;
8848       }
8849 
mergeFrom(com.google.protobuf.Message other)8850       public Builder mergeFrom(com.google.protobuf.Message other) {
8851         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) {
8852           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse)other);
8853         } else {
8854           super.mergeFrom(other);
8855           return this;
8856         }
8857       }
8858 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other)8859       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse other) {
8860         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()) return this;
8861         this.mergeUnknownFields(other.getUnknownFields());
8862         return this;
8863       }
8864 
isInitialized()8865       public final boolean isInitialized() {
8866         return true;
8867       }
8868 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8869       public Builder mergeFrom(
8870           com.google.protobuf.CodedInputStream input,
8871           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8872           throws java.io.IOException {
8873         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse parsedMessage = null;
8874         try {
8875           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8876         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8877           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) e.getUnfinishedMessage();
8878           throw e;
8879         } finally {
8880           if (parsedMessage != null) {
8881             mergeFrom(parsedMessage);
8882           }
8883         }
8884         return this;
8885       }
8886 
8887       // @@protoc_insertion_point(builder_scope:OfflineRegionResponse)
8888     }
8889 
8890     static {
8891       defaultInstance = new OfflineRegionResponse(true);
defaultInstance.initFields()8892       defaultInstance.initFields();
8893     }
8894 
8895     // @@protoc_insertion_point(class_scope:OfflineRegionResponse)
8896   }
8897 
  // Accessor contract for CreateTableRequest: a required table schema, an
  // optional list of region split keys, and optional nonce fields (which,
  // by convention in this file, are presumably used for idempotent retry
  // detection — confirm against the Master RPC handlers).
  public interface CreateTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableSchema table_schema = 1;
    /**
     * <code>required .TableSchema table_schema = 1;</code>
     */
    boolean hasTableSchema();
    /**
     * <code>required .TableSchema table_schema = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema();
    /**
     * <code>required .TableSchema table_schema = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder();

    // repeated bytes split_keys = 2;
    /**
     * <code>repeated bytes split_keys = 2;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getSplitKeysList();
    /**
     * <code>repeated bytes split_keys = 2;</code>
     */
    int getSplitKeysCount();
    /**
     * <code>repeated bytes split_keys = 2;</code>
     */
    com.google.protobuf.ByteString getSplitKeys(int index);

    // optional uint64 nonce_group = 3 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 4 [default = 0];
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    long getNonce();
  }
8949   /**
8950    * Protobuf type {@code CreateTableRequest}
8951    */
8952   public static final class CreateTableRequest extends
8953       com.google.protobuf.GeneratedMessage
8954       implements CreateTableRequestOrBuilder {
    // Use CreateTableRequest.newBuilder() to construct.
    private CreateTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the shared default instance.
    private CreateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8961 
    // Shared default (empty) instance, created in the class's static initializer.
    private static final CreateTableRequest defaultInstance;
    public static CreateTableRequest getDefaultInstance() {
      return defaultInstance;
    }

    public CreateTableRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
CreateTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8977     private CreateTableRequest(
8978         com.google.protobuf.CodedInputStream input,
8979         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8980         throws com.google.protobuf.InvalidProtocolBufferException {
8981       initFields();
8982       int mutable_bitField0_ = 0;
8983       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8984           com.google.protobuf.UnknownFieldSet.newBuilder();
8985       try {
8986         boolean done = false;
8987         while (!done) {
8988           int tag = input.readTag();
8989           switch (tag) {
8990             case 0:
8991               done = true;
8992               break;
8993             default: {
8994               if (!parseUnknownField(input, unknownFields,
8995                                      extensionRegistry, tag)) {
8996                 done = true;
8997               }
8998               break;
8999             }
9000             case 10: {
9001               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null;
9002               if (((bitField0_ & 0x00000001) == 0x00000001)) {
9003                 subBuilder = tableSchema_.toBuilder();
9004               }
9005               tableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry);
9006               if (subBuilder != null) {
9007                 subBuilder.mergeFrom(tableSchema_);
9008                 tableSchema_ = subBuilder.buildPartial();
9009               }
9010               bitField0_ |= 0x00000001;
9011               break;
9012             }
9013             case 18: {
9014               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
9015                 splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
9016                 mutable_bitField0_ |= 0x00000002;
9017               }
9018               splitKeys_.add(input.readBytes());
9019               break;
9020             }
9021             case 24: {
9022               bitField0_ |= 0x00000002;
9023               nonceGroup_ = input.readUInt64();
9024               break;
9025             }
9026             case 32: {
9027               bitField0_ |= 0x00000004;
9028               nonce_ = input.readUInt64();
9029               break;
9030             }
9031           }
9032         }
9033       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9034         throw e.setUnfinishedMessage(this);
9035       } catch (java.io.IOException e) {
9036         throw new com.google.protobuf.InvalidProtocolBufferException(
9037             e.getMessage()).setUnfinishedMessage(this);
9038       } finally {
9039         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
9040           splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_);
9041         }
9042         this.unknownFields = unknownFields.build();
9043         makeExtensionsImmutable();
9044       }
9045     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.Builder.class);
    }

    // Stateless parser singleton; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<CreateTableRequest> PARSER =
        new com.google.protobuf.AbstractParser<CreateTableRequest>() {
      public CreateTableRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CreateTableRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CreateTableRequest> getParserForType() {
      return PARSER;
    }
9072 
    // Presence bits: 0x1 = table_schema, 0x2 = nonce_group, 0x4 = nonce.
    private int bitField0_;
    // required .TableSchema table_schema = 1;
    public static final int TABLE_SCHEMA_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_;
    /**
     * <code>required .TableSchema table_schema = 1;</code>
     */
    public boolean hasTableSchema() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableSchema table_schema = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
      return tableSchema_;
    }
    /**
     * <code>required .TableSchema table_schema = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
      return tableSchema_;
    }

    // repeated bytes split_keys = 2;
    public static final int SPLIT_KEYS_FIELD_NUMBER = 2;
    // Unmodifiable after construction (see the parsing ctor / initFields).
    private java.util.List<com.google.protobuf.ByteString> splitKeys_;
    /**
     * <code>repeated bytes split_keys = 2;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getSplitKeysList() {
      return splitKeys_;
    }
    /**
     * <code>repeated bytes split_keys = 2;</code>
     */
    public int getSplitKeysCount() {
      return splitKeys_.size();
    }
    /**
     * <code>repeated bytes split_keys = 2;</code>
     */
    public com.google.protobuf.ByteString getSplitKeys(int index) {
      return splitKeys_.get(index);
    }

    // optional uint64 nonce_group = 3 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 3;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 4 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 4;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
9150 
initFields()9151     private void initFields() {
9152       tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
9153       splitKeys_ = java.util.Collections.emptyList();
9154       nonceGroup_ = 0L;
9155       nonce_ = 0L;
9156     }
9157     private byte memoizedIsInitialized = -1;
isInitialized()9158     public final boolean isInitialized() {
9159       byte isInitialized = memoizedIsInitialized;
9160       if (isInitialized != -1) return isInitialized == 1;
9161 
9162       if (!hasTableSchema()) {
9163         memoizedIsInitialized = 0;
9164         return false;
9165       }
9166       if (!getTableSchema().isInitialized()) {
9167         memoizedIsInitialized = 0;
9168         return false;
9169       }
9170       memoizedIsInitialized = 1;
9171       return true;
9172     }
9173 
writeTo(com.google.protobuf.CodedOutputStream output)9174     public void writeTo(com.google.protobuf.CodedOutputStream output)
9175                         throws java.io.IOException {
9176       getSerializedSize();
9177       if (((bitField0_ & 0x00000001) == 0x00000001)) {
9178         output.writeMessage(1, tableSchema_);
9179       }
9180       for (int i = 0; i < splitKeys_.size(); i++) {
9181         output.writeBytes(2, splitKeys_.get(i));
9182       }
9183       if (((bitField0_ & 0x00000002) == 0x00000002)) {
9184         output.writeUInt64(3, nonceGroup_);
9185       }
9186       if (((bitField0_ & 0x00000004) == 0x00000004)) {
9187         output.writeUInt64(4, nonce_);
9188       }
9189       getUnknownFields().writeTo(output);
9190     }
9191 
9192     private int memoizedSerializedSize = -1;
getSerializedSize()9193     public int getSerializedSize() {
9194       int size = memoizedSerializedSize;
9195       if (size != -1) return size;
9196 
9197       size = 0;
9198       if (((bitField0_ & 0x00000001) == 0x00000001)) {
9199         size += com.google.protobuf.CodedOutputStream
9200           .computeMessageSize(1, tableSchema_);
9201       }
9202       {
9203         int dataSize = 0;
9204         for (int i = 0; i < splitKeys_.size(); i++) {
9205           dataSize += com.google.protobuf.CodedOutputStream
9206             .computeBytesSizeNoTag(splitKeys_.get(i));
9207         }
9208         size += dataSize;
9209         size += 1 * getSplitKeysList().size();
9210       }
9211       if (((bitField0_ & 0x00000002) == 0x00000002)) {
9212         size += com.google.protobuf.CodedOutputStream
9213           .computeUInt64Size(3, nonceGroup_);
9214       }
9215       if (((bitField0_ & 0x00000004) == 0x00000004)) {
9216         size += com.google.protobuf.CodedOutputStream
9217           .computeUInt64Size(4, nonce_);
9218       }
9219       size += getUnknownFields().getSerializedSize();
9220       memoizedSerializedSize = size;
9221       return size;
9222     }
9223 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
9230 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) obj;

      // Field-by-field comparison: presence must match, and when present the
      // values must match; unknown fields are compared last.
      boolean result = true;
      result = result && (hasTableSchema() == other.hasTableSchema());
      if (hasTableSchema()) {
        result = result && getTableSchema()
            .equals(other.getTableSchema());
      }
      result = result && getSplitKeysList()
          .equals(other.getSplitKeysList());
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hashCode(); 0 means not yet computed. Only set fields
    // contribute, keeping hashCode consistent with equals above.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableSchema()) {
        hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER;
        hash = (53 * hash) + getTableSchema().hashCode();
      }
      if (getSplitKeysCount() > 0) {
        hash = (37 * hash) + SPLIT_KEYS_FIELD_NUMBER;
        hash = (53 * hash) + getSplitKeysList().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
9292 
parseFrom( com.google.protobuf.ByteString data)9293     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
9294         com.google.protobuf.ByteString data)
9295         throws com.google.protobuf.InvalidProtocolBufferException {
9296       return PARSER.parseFrom(data);
9297     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9298     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
9299         com.google.protobuf.ByteString data,
9300         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9301         throws com.google.protobuf.InvalidProtocolBufferException {
9302       return PARSER.parseFrom(data, extensionRegistry);
9303     }
parseFrom(byte[] data)9304     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(byte[] data)
9305         throws com.google.protobuf.InvalidProtocolBufferException {
9306       return PARSER.parseFrom(data);
9307     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9308     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
9309         byte[] data,
9310         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9311         throws com.google.protobuf.InvalidProtocolBufferException {
9312       return PARSER.parseFrom(data, extensionRegistry);
9313     }
parseFrom(java.io.InputStream input)9314     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(java.io.InputStream input)
9315         throws java.io.IOException {
9316       return PARSER.parseFrom(input);
9317     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9318     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
9319         java.io.InputStream input,
9320         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9321         throws java.io.IOException {
9322       return PARSER.parseFrom(input, extensionRegistry);
9323     }
parseDelimitedFrom(java.io.InputStream input)9324     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input)
9325         throws java.io.IOException {
9326       return PARSER.parseDelimitedFrom(input);
9327     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9328     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseDelimitedFrom(
9329         java.io.InputStream input,
9330         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9331         throws java.io.IOException {
9332       return PARSER.parseDelimitedFrom(input, extensionRegistry);
9333     }
parseFrom( com.google.protobuf.CodedInputStream input)9334     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
9335         com.google.protobuf.CodedInputStream input)
9336         throws java.io.IOException {
9337       return PARSER.parseFrom(input);
9338     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9339     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parseFrom(
9340         com.google.protobuf.CodedInputStream input,
9341         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9342         throws java.io.IOException {
9343       return PARSER.parseFrom(input, extensionRegistry);
9344     }
9345 
newBuilder()9346     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()9347     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest prototype)9348     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest prototype) {
9349       return newBuilder().mergeFrom(prototype);
9350     }
toBuilder()9351     public Builder toBuilder() { return newBuilder(this); }
9352 
9353     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9354     protected Builder newBuilderForType(
9355         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9356       Builder builder = new Builder(parent);
9357       return builder;
9358     }
9359     /**
9360      * Protobuf type {@code CreateTableRequest}
9361      */
9362     public static final class Builder extends
9363         com.google.protobuf.GeneratedMessage.Builder<Builder>
9364        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequestOrBuilder {
9365       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()9366           getDescriptor() {
9367         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor;
9368       }
9369 
9370       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()9371           internalGetFieldAccessorTable() {
9372         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_fieldAccessorTable
9373             .ensureFieldAccessorsInitialized(
9374                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.Builder.class);
9375       }
9376 
9377       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.newBuilder()
Builder()9378       private Builder() {
9379         maybeForceBuilderInitialization();
9380       }
9381 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)9382       private Builder(
9383           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9384         super(parent);
9385         maybeForceBuilderInitialization();
9386       }
maybeForceBuilderInitialization()9387       private void maybeForceBuilderInitialization() {
9388         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9389           getTableSchemaFieldBuilder();
9390         }
9391       }
create()9392       private static Builder create() {
9393         return new Builder();
9394       }
9395 
clear()9396       public Builder clear() {
9397         super.clear();
9398         if (tableSchemaBuilder_ == null) {
9399           tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
9400         } else {
9401           tableSchemaBuilder_.clear();
9402         }
9403         bitField0_ = (bitField0_ & ~0x00000001);
9404         splitKeys_ = java.util.Collections.emptyList();
9405         bitField0_ = (bitField0_ & ~0x00000002);
9406         nonceGroup_ = 0L;
9407         bitField0_ = (bitField0_ & ~0x00000004);
9408         nonce_ = 0L;
9409         bitField0_ = (bitField0_ & ~0x00000008);
9410         return this;
9411       }
9412 
clone()9413       public Builder clone() {
9414         return create().mergeFrom(buildPartial());
9415       }
9416 
9417       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()9418           getDescriptorForType() {
9419         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableRequest_descriptor;
9420       }
9421 
getDefaultInstanceForType()9422       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest getDefaultInstanceForType() {
9423         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance();
9424       }
9425 
build()9426       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest build() {
9427         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = buildPartial();
9428         if (!result.isInitialized()) {
9429           throw newUninitializedMessageException(result);
9430         }
9431         return result;
9432       }
9433 
buildPartial()9434       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest buildPartial() {
9435         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest(this);
9436         int from_bitField0_ = bitField0_;
9437         int to_bitField0_ = 0;
9438         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9439           to_bitField0_ |= 0x00000001;
9440         }
9441         if (tableSchemaBuilder_ == null) {
9442           result.tableSchema_ = tableSchema_;
9443         } else {
9444           result.tableSchema_ = tableSchemaBuilder_.build();
9445         }
9446         if (((bitField0_ & 0x00000002) == 0x00000002)) {
9447           splitKeys_ = java.util.Collections.unmodifiableList(splitKeys_);
9448           bitField0_ = (bitField0_ & ~0x00000002);
9449         }
9450         result.splitKeys_ = splitKeys_;
9451         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
9452           to_bitField0_ |= 0x00000002;
9453         }
9454         result.nonceGroup_ = nonceGroup_;
9455         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
9456           to_bitField0_ |= 0x00000004;
9457         }
9458         result.nonce_ = nonce_;
9459         result.bitField0_ = to_bitField0_;
9460         onBuilt();
9461         return result;
9462       }
9463 
mergeFrom(com.google.protobuf.Message other)9464       public Builder mergeFrom(com.google.protobuf.Message other) {
9465         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) {
9466           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)other);
9467         } else {
9468           super.mergeFrom(other);
9469           return this;
9470         }
9471       }
9472 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other)9473       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest other) {
9474         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance()) return this;
9475         if (other.hasTableSchema()) {
9476           mergeTableSchema(other.getTableSchema());
9477         }
9478         if (!other.splitKeys_.isEmpty()) {
9479           if (splitKeys_.isEmpty()) {
9480             splitKeys_ = other.splitKeys_;
9481             bitField0_ = (bitField0_ & ~0x00000002);
9482           } else {
9483             ensureSplitKeysIsMutable();
9484             splitKeys_.addAll(other.splitKeys_);
9485           }
9486           onChanged();
9487         }
9488         if (other.hasNonceGroup()) {
9489           setNonceGroup(other.getNonceGroup());
9490         }
9491         if (other.hasNonce()) {
9492           setNonce(other.getNonce());
9493         }
9494         this.mergeUnknownFields(other.getUnknownFields());
9495         return this;
9496       }
9497 
isInitialized()9498       public final boolean isInitialized() {
9499         if (!hasTableSchema()) {
9500 
9501           return false;
9502         }
9503         if (!getTableSchema().isInitialized()) {
9504 
9505           return false;
9506         }
9507         return true;
9508       }
9509 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9510       public Builder mergeFrom(
9511           com.google.protobuf.CodedInputStream input,
9512           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9513           throws java.io.IOException {
9514         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest parsedMessage = null;
9515         try {
9516           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9517         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9518           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest) e.getUnfinishedMessage();
9519           throw e;
9520         } finally {
9521           if (parsedMessage != null) {
9522             mergeFrom(parsedMessage);
9523           }
9524         }
9525         return this;
9526       }
9527       private int bitField0_;
9528 
9529       // required .TableSchema table_schema = 1;
9530       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
9531       private com.google.protobuf.SingleFieldBuilder<
9532           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;
9533       /**
9534        * <code>required .TableSchema table_schema = 1;</code>
9535        */
hasTableSchema()9536       public boolean hasTableSchema() {
9537         return ((bitField0_ & 0x00000001) == 0x00000001);
9538       }
9539       /**
9540        * <code>required .TableSchema table_schema = 1;</code>
9541        */
getTableSchema()9542       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
9543         if (tableSchemaBuilder_ == null) {
9544           return tableSchema_;
9545         } else {
9546           return tableSchemaBuilder_.getMessage();
9547         }
9548       }
9549       /**
9550        * <code>required .TableSchema table_schema = 1;</code>
9551        */
setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value)9552       public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
9553         if (tableSchemaBuilder_ == null) {
9554           if (value == null) {
9555             throw new NullPointerException();
9556           }
9557           tableSchema_ = value;
9558           onChanged();
9559         } else {
9560           tableSchemaBuilder_.setMessage(value);
9561         }
9562         bitField0_ |= 0x00000001;
9563         return this;
9564       }
9565       /**
9566        * <code>required .TableSchema table_schema = 1;</code>
9567        */
setTableSchema( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue)9568       public Builder setTableSchema(
9569           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
9570         if (tableSchemaBuilder_ == null) {
9571           tableSchema_ = builderForValue.build();
9572           onChanged();
9573         } else {
9574           tableSchemaBuilder_.setMessage(builderForValue.build());
9575         }
9576         bitField0_ |= 0x00000001;
9577         return this;
9578       }
9579       /**
9580        * <code>required .TableSchema table_schema = 1;</code>
9581        */
mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value)9582       public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
9583         if (tableSchemaBuilder_ == null) {
9584           if (((bitField0_ & 0x00000001) == 0x00000001) &&
9585               tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) {
9586             tableSchema_ =
9587               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial();
9588           } else {
9589             tableSchema_ = value;
9590           }
9591           onChanged();
9592         } else {
9593           tableSchemaBuilder_.mergeFrom(value);
9594         }
9595         bitField0_ |= 0x00000001;
9596         return this;
9597       }
9598       /**
9599        * <code>required .TableSchema table_schema = 1;</code>
9600        */
clearTableSchema()9601       public Builder clearTableSchema() {
9602         if (tableSchemaBuilder_ == null) {
9603           tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
9604           onChanged();
9605         } else {
9606           tableSchemaBuilder_.clear();
9607         }
9608         bitField0_ = (bitField0_ & ~0x00000001);
9609         return this;
9610       }
9611       /**
9612        * <code>required .TableSchema table_schema = 1;</code>
9613        */
getTableSchemaBuilder()9614       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() {
9615         bitField0_ |= 0x00000001;
9616         onChanged();
9617         return getTableSchemaFieldBuilder().getBuilder();
9618       }
9619       /**
9620        * <code>required .TableSchema table_schema = 1;</code>
9621        */
getTableSchemaOrBuilder()9622       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
9623         if (tableSchemaBuilder_ != null) {
9624           return tableSchemaBuilder_.getMessageOrBuilder();
9625         } else {
9626           return tableSchema_;
9627         }
9628       }
9629       /**
9630        * <code>required .TableSchema table_schema = 1;</code>
9631        */
9632       private com.google.protobuf.SingleFieldBuilder<
9633           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
getTableSchemaFieldBuilder()9634           getTableSchemaFieldBuilder() {
9635         if (tableSchemaBuilder_ == null) {
9636           tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
9637               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
9638                   tableSchema_,
9639                   getParentForChildren(),
9640                   isClean());
9641           tableSchema_ = null;
9642         }
9643         return tableSchemaBuilder_;
9644       }
9645 
9646       // repeated bytes split_keys = 2;
9647       private java.util.List<com.google.protobuf.ByteString> splitKeys_ = java.util.Collections.emptyList();
ensureSplitKeysIsMutable()9648       private void ensureSplitKeysIsMutable() {
9649         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
9650           splitKeys_ = new java.util.ArrayList<com.google.protobuf.ByteString>(splitKeys_);
9651           bitField0_ |= 0x00000002;
9652          }
9653       }
9654       /**
9655        * <code>repeated bytes split_keys = 2;</code>
9656        */
9657       public java.util.List<com.google.protobuf.ByteString>
getSplitKeysList()9658           getSplitKeysList() {
9659         return java.util.Collections.unmodifiableList(splitKeys_);
9660       }
9661       /**
9662        * <code>repeated bytes split_keys = 2;</code>
9663        */
getSplitKeysCount()9664       public int getSplitKeysCount() {
9665         return splitKeys_.size();
9666       }
9667       /**
9668        * <code>repeated bytes split_keys = 2;</code>
9669        */
getSplitKeys(int index)9670       public com.google.protobuf.ByteString getSplitKeys(int index) {
9671         return splitKeys_.get(index);
9672       }
9673       /**
9674        * <code>repeated bytes split_keys = 2;</code>
9675        */
setSplitKeys( int index, com.google.protobuf.ByteString value)9676       public Builder setSplitKeys(
9677           int index, com.google.protobuf.ByteString value) {
9678         if (value == null) {
9679     throw new NullPointerException();
9680   }
9681   ensureSplitKeysIsMutable();
9682         splitKeys_.set(index, value);
9683         onChanged();
9684         return this;
9685       }
9686       /**
9687        * <code>repeated bytes split_keys = 2;</code>
9688        */
addSplitKeys(com.google.protobuf.ByteString value)9689       public Builder addSplitKeys(com.google.protobuf.ByteString value) {
9690         if (value == null) {
9691     throw new NullPointerException();
9692   }
9693   ensureSplitKeysIsMutable();
9694         splitKeys_.add(value);
9695         onChanged();
9696         return this;
9697       }
9698       /**
9699        * <code>repeated bytes split_keys = 2;</code>
9700        */
addAllSplitKeys( java.lang.Iterable<? extends com.google.protobuf.ByteString> values)9701       public Builder addAllSplitKeys(
9702           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
9703         ensureSplitKeysIsMutable();
9704         super.addAll(values, splitKeys_);
9705         onChanged();
9706         return this;
9707       }
9708       /**
9709        * <code>repeated bytes split_keys = 2;</code>
9710        */
clearSplitKeys()9711       public Builder clearSplitKeys() {
9712         splitKeys_ = java.util.Collections.emptyList();
9713         bitField0_ = (bitField0_ & ~0x00000002);
9714         onChanged();
9715         return this;
9716       }
9717 
9718       // optional uint64 nonce_group = 3 [default = 0];
9719       private long nonceGroup_ ;
9720       /**
9721        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
9722        */
hasNonceGroup()9723       public boolean hasNonceGroup() {
9724         return ((bitField0_ & 0x00000004) == 0x00000004);
9725       }
9726       /**
9727        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
9728        */
getNonceGroup()9729       public long getNonceGroup() {
9730         return nonceGroup_;
9731       }
9732       /**
9733        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
9734        */
setNonceGroup(long value)9735       public Builder setNonceGroup(long value) {
9736         bitField0_ |= 0x00000004;
9737         nonceGroup_ = value;
9738         onChanged();
9739         return this;
9740       }
9741       /**
9742        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
9743        */
clearNonceGroup()9744       public Builder clearNonceGroup() {
9745         bitField0_ = (bitField0_ & ~0x00000004);
9746         nonceGroup_ = 0L;
9747         onChanged();
9748         return this;
9749       }
9750 
9751       // optional uint64 nonce = 4 [default = 0];
9752       private long nonce_ ;
9753       /**
9754        * <code>optional uint64 nonce = 4 [default = 0];</code>
9755        */
hasNonce()9756       public boolean hasNonce() {
9757         return ((bitField0_ & 0x00000008) == 0x00000008);
9758       }
9759       /**
9760        * <code>optional uint64 nonce = 4 [default = 0];</code>
9761        */
getNonce()9762       public long getNonce() {
9763         return nonce_;
9764       }
9765       /**
9766        * <code>optional uint64 nonce = 4 [default = 0];</code>
9767        */
setNonce(long value)9768       public Builder setNonce(long value) {
9769         bitField0_ |= 0x00000008;
9770         nonce_ = value;
9771         onChanged();
9772         return this;
9773       }
9774       /**
9775        * <code>optional uint64 nonce = 4 [default = 0];</code>
9776        */
clearNonce()9777       public Builder clearNonce() {
9778         bitField0_ = (bitField0_ & ~0x00000008);
9779         nonce_ = 0L;
9780         onChanged();
9781         return this;
9782       }
9783 
9784       // @@protoc_insertion_point(builder_scope:CreateTableRequest)
9785     }
9786 
9787     static {
9788       defaultInstance = new CreateTableRequest(true);
defaultInstance.initFields()9789       defaultInstance.initFields();
9790     }
9791 
9792     // @@protoc_insertion_point(class_scope:CreateTableRequest)
9793   }
9794 
9795   public interface CreateTableResponseOrBuilder
9796       extends com.google.protobuf.MessageOrBuilder {
9797 
9798     // optional uint64 proc_id = 1;
9799     /**
9800      * <code>optional uint64 proc_id = 1;</code>
9801      */
hasProcId()9802     boolean hasProcId();
9803     /**
9804      * <code>optional uint64 proc_id = 1;</code>
9805      */
getProcId()9806     long getProcId();
9807   }
9808   /**
9809    * Protobuf type {@code CreateTableResponse}
9810    */
9811   public static final class CreateTableResponse extends
9812       com.google.protobuf.GeneratedMessage
9813       implements CreateTableResponseOrBuilder {
9814     // Use CreateTableResponse.newBuilder() to construct.
CreateTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)9815     private CreateTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
9816       super(builder);
9817       this.unknownFields = builder.getUnknownFields();
9818     }
CreateTableResponse(boolean noInit)9819     private CreateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9820 
9821     private static final CreateTableResponse defaultInstance;
getDefaultInstance()9822     public static CreateTableResponse getDefaultInstance() {
9823       return defaultInstance;
9824     }
9825 
getDefaultInstanceForType()9826     public CreateTableResponse getDefaultInstanceForType() {
9827       return defaultInstance;
9828     }
9829 
9830     private final com.google.protobuf.UnknownFieldSet unknownFields;
9831     @java.lang.Override
9832     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()9833         getUnknownFields() {
9834       return this.unknownFields;
9835     }
CreateTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9836     private CreateTableResponse(
9837         com.google.protobuf.CodedInputStream input,
9838         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9839         throws com.google.protobuf.InvalidProtocolBufferException {
9840       initFields();
9841       int mutable_bitField0_ = 0;
9842       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
9843           com.google.protobuf.UnknownFieldSet.newBuilder();
9844       try {
9845         boolean done = false;
9846         while (!done) {
9847           int tag = input.readTag();
9848           switch (tag) {
9849             case 0:
9850               done = true;
9851               break;
9852             default: {
9853               if (!parseUnknownField(input, unknownFields,
9854                                      extensionRegistry, tag)) {
9855                 done = true;
9856               }
9857               break;
9858             }
9859             case 8: {
9860               bitField0_ |= 0x00000001;
9861               procId_ = input.readUInt64();
9862               break;
9863             }
9864           }
9865         }
9866       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9867         throw e.setUnfinishedMessage(this);
9868       } catch (java.io.IOException e) {
9869         throw new com.google.protobuf.InvalidProtocolBufferException(
9870             e.getMessage()).setUnfinishedMessage(this);
9871       } finally {
9872         this.unknownFields = unknownFields.build();
9873         makeExtensionsImmutable();
9874       }
9875     }
9876     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()9877         getDescriptor() {
9878       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor;
9879     }
9880 
9881     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()9882         internalGetFieldAccessorTable() {
9883       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_fieldAccessorTable
9884           .ensureFieldAccessorsInitialized(
9885               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.Builder.class);
9886     }
9887 
9888     public static com.google.protobuf.Parser<CreateTableResponse> PARSER =
9889         new com.google.protobuf.AbstractParser<CreateTableResponse>() {
9890       public CreateTableResponse parsePartialFrom(
9891           com.google.protobuf.CodedInputStream input,
9892           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9893           throws com.google.protobuf.InvalidProtocolBufferException {
9894         return new CreateTableResponse(input, extensionRegistry);
9895       }
9896     };
9897 
9898     @java.lang.Override
getParserForType()9899     public com.google.protobuf.Parser<CreateTableResponse> getParserForType() {
9900       return PARSER;
9901     }
9902 
    // Presence bits for optional fields (bit 0 = proc_id).
    private int bitField0_;
    // optional uint64 proc_id = 1;
    public static final int PROC_ID_FIELD_NUMBER = 1;
    private long procId_;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     *
     * @return true if proc_id was explicitly set on the wire or by a builder.
     */
    public boolean hasProcId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 proc_id = 1;</code>
     *
     * @return the proc_id value, or 0 (the proto default) when unset.
     */
    public long getProcId() {
      return procId_;
    }

    // Resets all fields to their proto default values.
    private void initFields() {
      procId_ = 0L;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // This message has no required fields, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
9931 
    // Serializes the set fields, in field-number order, to the given stream.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // force the memoized size to be computed first
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, procId_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size in bytes; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, procId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is routed through the superclass serialization proxy.
      return super.writeReplace();
    }
9962 
9963     @java.lang.Override
equals(final java.lang.Object obj)9964     public boolean equals(final java.lang.Object obj) {
9965       if (obj == this) {
9966        return true;
9967       }
9968       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse)) {
9969         return super.equals(obj);
9970       }
9971       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) obj;
9972 
9973       boolean result = true;
9974       result = result && (hasProcId() == other.hasProcId());
9975       if (hasProcId()) {
9976         result = result && (getProcId()
9977             == other.getProcId());
9978       }
9979       result = result &&
9980           getUnknownFields().equals(other.getUnknownFields());
9981       return result;
9982     }
9983 
    // Memoized hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcId()) {
        // Mix in the field number and value only when present,
        // keeping hashCode() consistent with equals().
        hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getProcId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
10000 
    // ---- Static parsing entry points; all delegate to PARSER. ----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants expect a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Creates a fresh builder with all fields unset.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a builder pre-populated from the given prototype message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parented builders notify their parent on change (nested-builder support).
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code CreateTableResponse}
     *
     * <p>Mutable builder for assembling {@code CreateTableResponse} messages.
     * Not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor;
      }

      // Maps the message's proto fields to the generated reflection accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // This message has no sub-message fields, so there is nothing to
      // eagerly initialize even when alwaysUseFieldBuilders is set.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all fields to defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        procId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateTableResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance();
      }

      // Builds the message, throwing if required fields are missing.
      // CreateTableResponse has no required fields, so this never throws.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without initialization checks.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.procId_ = procId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse)other);
        } else {
          // Different message type: merge field-by-field via reflection.
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields present in 'other' overwrite this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance()) return this;
        if (other.hasProcId()) {
          setProcId(other.getProcId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from the stream and merges into this builder.  On parse
      // failure, whatever was successfully parsed is still merged (see the
      // finally block) before the exception is rethrown.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for builder fields (bit 0 = proc_id).
      private int bitField0_;

      // optional uint64 proc_id = 1;
      private long procId_ ;
      /**
       * <code>optional uint64 proc_id = 1;</code>
       *
       * @return true if proc_id has been set on this builder.
       */
      public boolean hasProcId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       *
       * @return the proc_id value, or 0 when unset.
       */
      public long getProcId() {
        return procId_;
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       *
       * @param value the proc_id to set; also marks the field present.
       */
      public Builder setProcId(long value) {
        bitField0_ |= 0x00000001;
        procId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       *
       * <p>Clears the field back to its default (0) and unsets its presence bit.
       */
      public Builder clearProcId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        procId_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:CreateTableResponse)
    }
10220 
    static {
      // Eagerly create the singleton default instance with default field values.
      defaultInstance = new CreateTableResponse(true);
      defaultInstance.initFields();
    }
10225 
10226     // @@protoc_insertion_point(class_scope:CreateTableResponse)
10227   }
10228 
  // Read-only view of a DeleteTableRequest, implemented by both the message
  // and its Builder.
  public interface DeleteTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * @return true if table_name has been set.
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * @return the table to delete (default instance when unset).
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // optional uint64 nonce_group = 2 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 3 [default = 0];
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    long getNonce();
  }
10266   /**
10267    * Protobuf type {@code DeleteTableRequest}
10268    */
10269   public static final class DeleteTableRequest extends
10270       com.google.protobuf.GeneratedMessage
10271       implements DeleteTableRequestOrBuilder {
10272     // Use DeleteTableRequest.newBuilder() to construct.
    // Use DeleteTableRequest.newBuilder() to construct.
    private DeleteTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance; field values are set
    // separately via initFields() in the static initializer.
    private DeleteTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton message with every field at its proto default.
    private static final DeleteTableRequest defaultInstance;
    public static DeleteTableRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DeleteTableRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that this schema version does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of
    // input, preserving unrecognized fields in unknownFields.
    private DeleteTableRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the generator places 'default' between the case labels;
          // Java switch dispatch is unaffected by label ordering.
          switch (tag) {
            case 0:
              // Tag 0 signals end of the input stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (table_name), length-delimited: merge into any value
              // already read so repeated occurrences accumulate per proto rules.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              // Field 2 (nonce_group), varint.
              bitField0_ |= 0x00000002;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 24: {
              // Field 3 (nonce), varint.
              bitField0_ |= 0x00000004;
              nonce_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture whatever unknown fields were read, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor;
    }

    // Maps this message's proto fields to the generated reflection accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.Builder.class);
    }

    // Wire-format parser for DeleteTableRequest.
    // NOTE(review): public static but not final, as protoc 2.5 emits it;
    // callers must treat it as read-only.
    public static com.google.protobuf.Parser<DeleteTableRequest> PARSER =
        new com.google.protobuf.AbstractParser<DeleteTableRequest>() {
      public DeleteTableRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Parsing is delegated to the stream-reading constructor.
        return new DeleteTableRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DeleteTableRequest> getParserForType() {
      return PARSER;
    }
10378 
    // Presence bits: bit 0 = table_name, bit 1 = nonce_group, bit 2 = nonce.
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * @return true if table_name was set.
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * @return the table to delete (default instance when unset).
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // optional uint64 nonce_group = 2 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 2;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     *
     * @return the nonce group, or 0 when unset.
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 3 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 3;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     *
     * @return the nonce, or 0 when unset.
     */
    public long getNonce() {
      return nonce_;
    }
10433 
    // Resets all fields to their proto default values.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      nonceGroup_ = 0L;
      nonce_ = 0L;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // table_name is required and must itself be fully initialized.
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
10455 
    // Serializes the set fields, in field-number order, to the given stream.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // force the memoized size to be computed first
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, nonce_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size in bytes; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
10493 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is routed through the superclass serialization proxy.
      return super.writeReplace();
    }
10500 
10501     @java.lang.Override
equals(final java.lang.Object obj)10502     public boolean equals(final java.lang.Object obj) {
10503       if (obj == this) {
10504        return true;
10505       }
10506       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)) {
10507         return super.equals(obj);
10508       }
10509       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) obj;
10510 
10511       boolean result = true;
10512       result = result && (hasTableName() == other.hasTableName());
10513       if (hasTableName()) {
10514         result = result && getTableName()
10515             .equals(other.getTableName());
10516       }
10517       result = result && (hasNonceGroup() == other.hasNonceGroup());
10518       if (hasNonceGroup()) {
10519         result = result && (getNonceGroup()
10520             == other.getNonceGroup());
10521       }
10522       result = result && (hasNonce() == other.hasNonce());
10523       if (hasNonce()) {
10524         result = result && (getNonce()
10525             == other.getNonce());
10526       }
10527       result = result &&
10528           getUnknownFields().equals(other.getUnknownFields());
10529       return result;
10530     }
10531 
    // Memoized hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      // Each present field mixes in its field number and value, keeping
      // hashCode() consistent with equals().
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
10556 
parseFrom( com.google.protobuf.ByteString data)10557     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(
10558         com.google.protobuf.ByteString data)
10559         throws com.google.protobuf.InvalidProtocolBufferException {
10560       return PARSER.parseFrom(data);
10561     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10562     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(
10563         com.google.protobuf.ByteString data,
10564         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10565         throws com.google.protobuf.InvalidProtocolBufferException {
10566       return PARSER.parseFrom(data, extensionRegistry);
10567     }
    // ---- Static parse entry points for DeleteTableRequest. ----
    // Each overload simply delegates to the shared PARSER instance.
    // byte[] overloads throw InvalidProtocolBufferException on malformed
    // input; stream overloads additionally surface java.io.IOException.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    // Same as above, resolving any extensions via the supplied registry.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    // Reads one message that occupies the remainder of the stream.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Reads a varint length prefix first, then that many bytes of message;
    // suitable for streams carrying several messages back to back.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    // CodedInputStream overloads: caller controls buffering and limits.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
10609 
    // Returns a fresh Builder with all fields at their defaults.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a Builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware variant used internally by GeneratedMessage so nested
      // builders can propagate change notifications upward.
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DeleteTableRequest}
     *
     * <p>Builder for {@code DeleteTableRequest}. Field presence is tracked in
     * {@code bitField0_}: bit 0x1 = table_name (required message),
     * bit 0x2 = nonce_group (optional uint64), bit 0x4 = nonce (optional
     * uint64). Not thread-safe, as with all protobuf builders.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the table_name sub-builder when the runtime asks for
      // field builders up front (alwaysUseFieldBuilders is a debug/test hook).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all three fields to defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance();
      }

      // Builds and verifies required fields; throws if table_name is unset
      // or itself uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the required-field check, copying the builder's
      // presence bits into the message's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic-dispatch merge: routes to the typed overload when possible,
      // otherwise falls back to reflection-based merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields present in `other` overwrite/merge
      // into this builder; unknown fields are carried over too.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Required-field check: table_name must be set and itself initialized.
      public final boolean isInitialized() {
        if (!hasTableName()) {

          return false;
        }
        if (!getTableName().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from a stream and merges the result into this builder.
      // On parse failure the partially-parsed message (if any) is still
      // merged in the finally block before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .TableName table_name = 1;
      // tableName_ holds the value while no sub-builder exists; once
      // getTableNameFieldBuilder() is called, tableNameBuilder_ takes over
      // and tableName_ is nulled (standard SingleFieldBuilder handoff).
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          // Merge into the existing value only if one was already set and it
          // is not the shared default instance; otherwise just adopt `value`.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
        // Marks the field present: callers mutating the sub-builder count
        // as setting the field.
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {
          return tableName_;
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }

      // optional uint64 nonce_group = 2 [default = 0];
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000002;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 nonce = 3 [default = 0];
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000004;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DeleteTableRequest)
    }
10961 
    // Eagerly create the shared default instance (noInit=true skips the
    // normal builder path) and populate its default field values.
    static {
      defaultInstance = new DeleteTableRequest(true);
      defaultInstance.initFields();
    }
10966 
10967     // @@protoc_insertion_point(class_scope:DeleteTableRequest)
10968   }
10969 
  /**
   * Read-only accessor interface for {@code DeleteTableResponse}, implemented
   * by both the message and its builder. Exposes the single optional
   * {@code proc_id} field.
   */
  public interface DeleteTableResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint64 proc_id = 1;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    boolean hasProcId();
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    long getProcId();
  }
10983   /**
10984    * Protobuf type {@code DeleteTableResponse}
10985    */
10986   public static final class DeleteTableResponse extends
10987       com.google.protobuf.GeneratedMessage
10988       implements DeleteTableResponseOrBuilder {
    // Use DeleteTableResponse.newBuilder() to construct.
    private DeleteTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the shared default instance; skips field parsing.
    private DeleteTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
10995 
    // Singleton default instance, created in the static initializer below.
    private static final DeleteTableResponse defaultInstance;
    public static DeleteTableResponse getDefaultInstance() {
      return defaultInstance;
    }

    public DeleteTableResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
11004 
    // Fields that were present on the wire but not recognized by this schema;
    // preserved verbatim for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until EOF (tag 0) or an
    // unparseable unknown field. Recognized field: proc_id (field 1, varint,
    // tag byte 8). Note the `default:` label precedes `case 8:` — legal Java;
    // switch dispatch is by tag value, not label order.
    private DeleteTableResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              procId_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze the collected unknown fields, even on failure, so the
        // unfinished message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor and reflection accessor table for DeleteTableResponse,
    // wired to the statics initialized at file load.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.Builder.class);
    }
11062 
    // Shared parser instance all static parseFrom overloads delegate to.
    // (protoc 2.5 emits this as a mutable public static field by design.)
    public static com.google.protobuf.Parser<DeleteTableResponse> PARSER =
        new com.google.protobuf.AbstractParser<DeleteTableResponse>() {
      public DeleteTableResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteTableResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DeleteTableResponse> getParserForType() {
      return PARSER;
    }
11077 
    // Presence bitmask: bit 0x1 = proc_id set.
    private int bitField0_;
    // optional uint64 proc_id = 1;
    public static final int PROC_ID_FIELD_NUMBER = 1;
    private long procId_;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    public boolean hasProcId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    public long getProcId() {
      return procId_;
    }

    // Resets fields to proto defaults; called before wire parsing.
    private void initFields() {
      procId_ = 0L;
    }
    // -1 = not computed, 0 = false, 1 = true (memoized initialization check).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields in this message, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
11106 
    // Serializes set fields in field-number order, then unknown fields.
    // getSerializedSize() is invoked first to populate the memoized size,
    // which nested serialization relies on.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, procId_);
      }
      getUnknownFields().writeTo(output);
    }
11115 
    // -1 = not yet computed; the message is immutable so the size is cached.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, procId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
11130 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
11137 
    // Value equality: same presence of proc_id, same value when present,
    // and identical unknown-field sets.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) obj;

      boolean result = true;
      result = result && (hasProcId() == other.hasProcId());
      if (hasProcId()) {
        result = result && (getProcId()
            == other.getProcId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
11158 
    // 0 = not yet computed; safe to cache since the message is immutable.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard protobuf-generated hash: mix descriptor, each set field
      // (tagged by its field number), and unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcId()) {
        hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getProcId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
11175 
    // ---- Static parse entry points for DeleteTableResponse; all delegate
    // ---- to the shared PARSER instance (see overload notes on the
    // ---- corresponding request methods).
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
11228 
    // Returns a fresh Builder with all fields at their defaults.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a Builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware variant used internally by GeneratedMessage so nested
      // builders can propagate change notifications upward.
      Builder builder = new Builder(parent);
      return builder;
    }
11242     /**
11243      * Protobuf type {@code DeleteTableResponse}
11244      */
11245     public static final class Builder extends
11246         com.google.protobuf.GeneratedMessage.Builder<Builder>
11247        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponseOrBuilder {
      // Descriptor and reflection accessor table, shared with the enclosing
      // DeleteTableResponse message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.Builder.class);
      }
11259 
11260       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.newBuilder()
Builder()11261       private Builder() {
11262         maybeForceBuilderInitialization();
11263       }
11264 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)11265       private Builder(
11266           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11267         super(parent);
11268         maybeForceBuilderInitialization();
11269       }
maybeForceBuilderInitialization()11270       private void maybeForceBuilderInitialization() {
11271         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11272         }
11273       }
create()11274       private static Builder create() {
11275         return new Builder();
11276       }
11277 
clear()11278       public Builder clear() {
11279         super.clear();
11280         procId_ = 0L;
11281         bitField0_ = (bitField0_ & ~0x00000001);
11282         return this;
11283       }
11284 
clone()11285       public Builder clone() {
11286         return create().mergeFrom(buildPartial());
11287       }
11288 
11289       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()11290           getDescriptorForType() {
11291         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteTableResponse_descriptor;
11292       }
11293 
getDefaultInstanceForType()11294       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse getDefaultInstanceForType() {
11295         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance();
11296       }
11297 
build()11298       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse build() {
11299         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = buildPartial();
11300         if (!result.isInitialized()) {
11301           throw newUninitializedMessageException(result);
11302         }
11303         return result;
11304       }
11305 
buildPartial()11306       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse buildPartial() {
11307         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse(this);
11308         int from_bitField0_ = bitField0_;
11309         int to_bitField0_ = 0;
11310         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
11311           to_bitField0_ |= 0x00000001;
11312         }
11313         result.procId_ = procId_;
11314         result.bitField0_ = to_bitField0_;
11315         onBuilt();
11316         return result;
11317       }
11318 
mergeFrom(com.google.protobuf.Message other)11319       public Builder mergeFrom(com.google.protobuf.Message other) {
11320         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) {
11321           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse)other);
11322         } else {
11323           super.mergeFrom(other);
11324           return this;
11325         }
11326       }
11327 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other)11328       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse other) {
11329         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance()) return this;
11330         if (other.hasProcId()) {
11331           setProcId(other.getProcId());
11332         }
11333         this.mergeUnknownFields(other.getUnknownFields());
11334         return this;
11335       }
11336 
isInitialized()11337       public final boolean isInitialized() {
11338         return true;
11339       }
11340 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11341       public Builder mergeFrom(
11342           com.google.protobuf.CodedInputStream input,
11343           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11344           throws java.io.IOException {
11345         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse parsedMessage = null;
11346         try {
11347           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11348         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11349           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) e.getUnfinishedMessage();
11350           throw e;
11351         } finally {
11352           if (parsedMessage != null) {
11353             mergeFrom(parsedMessage);
11354           }
11355         }
11356         return this;
11357       }
11358       private int bitField0_;
11359 
11360       // optional uint64 proc_id = 1;
11361       private long procId_ ;
11362       /**
11363        * <code>optional uint64 proc_id = 1;</code>
11364        */
hasProcId()11365       public boolean hasProcId() {
11366         return ((bitField0_ & 0x00000001) == 0x00000001);
11367       }
11368       /**
11369        * <code>optional uint64 proc_id = 1;</code>
11370        */
getProcId()11371       public long getProcId() {
11372         return procId_;
11373       }
11374       /**
11375        * <code>optional uint64 proc_id = 1;</code>
11376        */
setProcId(long value)11377       public Builder setProcId(long value) {
11378         bitField0_ |= 0x00000001;
11379         procId_ = value;
11380         onChanged();
11381         return this;
11382       }
11383       /**
11384        * <code>optional uint64 proc_id = 1;</code>
11385        */
clearProcId()11386       public Builder clearProcId() {
11387         bitField0_ = (bitField0_ & ~0x00000001);
11388         procId_ = 0L;
11389         onChanged();
11390         return this;
11391       }
11392 
11393       // @@protoc_insertion_point(builder_scope:DeleteTableResponse)
11394     }
11395 
    // Eagerly create and initialize the singleton default instance.
    static {
      defaultInstance = new DeleteTableResponse(true);
      defaultInstance.initFields();
    }
11400 
11401     // @@protoc_insertion_point(class_scope:DeleteTableResponse)
11402   }
11403 
  /**
   * Read-only accessor interface for {@code TruncateTableRequest}: exposes the
   * required tableName plus the optional preserveSplits, nonce_group and nonce fields.
   */
  public interface TruncateTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName tableName = 1;
    /**
     * <code>required .TableName tableName = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName tableName = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName tableName = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // optional bool preserveSplits = 2 [default = false];
    /**
     * <code>optional bool preserveSplits = 2 [default = false];</code>
     */
    boolean hasPreserveSplits();
    /**
     * <code>optional bool preserveSplits = 2 [default = false];</code>
     */
    boolean getPreserveSplits();

    // optional uint64 nonce_group = 3 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 4 [default = 0];
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    long getNonce();
  }
11451   /**
11452    * Protobuf type {@code TruncateTableRequest}
11453    */
11454   public static final class TruncateTableRequest extends
11455       com.google.protobuf.GeneratedMessage
11456       implements TruncateTableRequestOrBuilder {
    // Use TruncateTableRequest.newBuilder() to construct.
    private TruncateTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the singleton default instance below.
    private TruncateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final TruncateTableRequest defaultInstance;
    /** Returns the shared default (all-fields-unset) instance. */
    public static TruncateTableRequest getDefaultInstance() {
      return defaultInstance;
    }

    public TruncateTableRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time are preserved here for reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until end of input
     * (tag 0), storing any unrecognized fields in {@code unknownFields}.
     */
    private TruncateTableRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (tableName): if already seen, merge the new value into the old one.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {
              // Field 2 (preserveSplits).
              bitField0_ |= 0x00000002;
              preserveSplits_ = input.readBool();
              break;
            }
            case 24: {
              // Field 3 (nonce_group).
              bitField0_ |= 0x00000004;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 32: {
              // Field 4 (nonce).
              bitField0_ |= 0x00000008;
              nonce_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields, even when parsing fails part-way.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.Builder.class);
    }

    // Parser that delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<TruncateTableRequest> PARSER =
        new com.google.protobuf.AbstractParser<TruncateTableRequest>() {
      public TruncateTableRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TruncateTableRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TruncateTableRequest> getParserForType() {
      return PARSER;
    }
11568 
    // Has-bits for the four fields: bit 0 = tableName, 1 = preserveSplits, 2 = nonce_group, 3 = nonce.
    private int bitField0_;
    // required .TableName tableName = 1;
    public static final int TABLENAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName tableName = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName tableName = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName tableName = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      // The message itself serves as its own OrBuilder view.
      return tableName_;
    }

    // optional bool preserveSplits = 2 [default = false];
    public static final int PRESERVESPLITS_FIELD_NUMBER = 2;
    private boolean preserveSplits_;
    /**
     * <code>optional bool preserveSplits = 2 [default = false];</code>
     */
    public boolean hasPreserveSplits() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool preserveSplits = 2 [default = false];</code>
     */
    public boolean getPreserveSplits() {
      return preserveSplits_;
    }

    // optional uint64 nonce_group = 3 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 3;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 4 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 4;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
11639 
    // Resets every field to its proto-declared default value.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      preserveSplits_ = false;
      nonceGroup_ = 0L;
      nonce_ = 0L;
    }
    // Memoized result: -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /** True iff the required tableName field is set and itself initialized. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
11662 
    /** Serializes the set fields in field-number order, followed by unknown fields. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the size is memoized before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, preserveSplits_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, nonceGroup_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeUInt64(4, nonce_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Computes (and memoizes) the serialized byte size of the set fields plus unknown fields. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, preserveSplits_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, nonceGroup_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(4, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
11707 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Delegate Java serialization to the GeneratedMessage replacement object.
      return super.writeReplace();
    }
11714 
    /** Field-by-field equality: same set fields, same values, same unknown fields. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasPreserveSplits() == other.hasPreserveSplits());
      if (hasPreserveSplits()) {
        result = result && (getPreserveSplits()
            == other.getPreserveSplits());
      }
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
11750 
    // Memoized hash; 0 is the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    /** Hash mixes the descriptor, each set field (tagged by field number), and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLENAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasPreserveSplits()) {
        hash = (37 * hash) + PRESERVESPLITS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getPreserveSplits());
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
11779 
    // Static parse entry points for every supported input source; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
11832 
    /** Creates a new empty Builder for TruncateTableRequest. */
    public static Builder newBuilder() { return Builder.create(); }
    /** Returns a new empty Builder (same as {@link #newBuilder()}). */
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a Builder pre-populated with the set fields of {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a Builder initialized with this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder; used internally by GeneratedMessage for nested builders.
      Builder builder = new Builder(parent);
      return builder;
    }
11846     /**
11847      * Protobuf type {@code TruncateTableRequest}
11848      */
11849     public static final class Builder extends
11850         com.google.protobuf.GeneratedMessage.Builder<Builder>
11851        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the tableName sub-builder when field builders are forced on.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
11882 
      /** Resets all four fields to their defaults and clears their has-bits. */
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        preserveSplits_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      // Clone by round-tripping through a partially built message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance();
      }
11912 
      /** Builds the message, throwing if the required tableName is unset or uninitialized. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Copies builder state into a new message without checking required fields. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Propagate each field's has-bit from the builder to the message.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // tableName comes from the sub-builder when one exists, else the stored message.
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.preserveSplits_ = preserveSplits_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
11949 
mergeFrom(com.google.protobuf.Message other)11950       public Builder mergeFrom(com.google.protobuf.Message other) {
11951         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest) {
11952           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)other);
11953         } else {
11954           super.mergeFrom(other);
11955           return this;
11956         }
11957       }
11958 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest other)11959       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest other) {
11960         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance()) return this;
11961         if (other.hasTableName()) {
11962           mergeTableName(other.getTableName());
11963         }
11964         if (other.hasPreserveSplits()) {
11965           setPreserveSplits(other.getPreserveSplits());
11966         }
11967         if (other.hasNonceGroup()) {
11968           setNonceGroup(other.getNonceGroup());
11969         }
11970         if (other.hasNonce()) {
11971           setNonce(other.getNonce());
11972         }
11973         this.mergeUnknownFields(other.getUnknownFields());
11974         return this;
11975       }
11976 
isInitialized()11977       public final boolean isInitialized() {
11978         if (!hasTableName()) {
11979 
11980           return false;
11981         }
11982         if (!getTableName().isInitialized()) {
11983 
11984           return false;
11985         }
11986         return true;
11987       }
11988 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11989       public Builder mergeFrom(
11990           com.google.protobuf.CodedInputStream input,
11991           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11992           throws java.io.IOException {
11993         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest parsedMessage = null;
11994         try {
11995           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11996         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11997           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest) e.getUnfinishedMessage();
11998           throw e;
11999         } finally {
12000           if (parsedMessage != null) {
12001             mergeFrom(parsedMessage);
12002           }
12003         }
12004         return this;
12005       }
12006       private int bitField0_;
12007 
12008       // required .TableName tableName = 1;
12009       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
12010       private com.google.protobuf.SingleFieldBuilder<
12011           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
12012       /**
12013        * <code>required .TableName tableName = 1;</code>
12014        */
hasTableName()12015       public boolean hasTableName() {
12016         return ((bitField0_ & 0x00000001) == 0x00000001);
12017       }
12018       /**
12019        * <code>required .TableName tableName = 1;</code>
12020        */
getTableName()12021       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
12022         if (tableNameBuilder_ == null) {
12023           return tableName_;
12024         } else {
12025           return tableNameBuilder_.getMessage();
12026         }
12027       }
12028       /**
12029        * <code>required .TableName tableName = 1;</code>
12030        */
setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)12031       public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
12032         if (tableNameBuilder_ == null) {
12033           if (value == null) {
12034             throw new NullPointerException();
12035           }
12036           tableName_ = value;
12037           onChanged();
12038         } else {
12039           tableNameBuilder_.setMessage(value);
12040         }
12041         bitField0_ |= 0x00000001;
12042         return this;
12043       }
12044       /**
12045        * <code>required .TableName tableName = 1;</code>
12046        */
setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)12047       public Builder setTableName(
12048           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
12049         if (tableNameBuilder_ == null) {
12050           tableName_ = builderForValue.build();
12051           onChanged();
12052         } else {
12053           tableNameBuilder_.setMessage(builderForValue.build());
12054         }
12055         bitField0_ |= 0x00000001;
12056         return this;
12057       }
12058       /**
12059        * <code>required .TableName tableName = 1;</code>
12060        */
mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)12061       public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
12062         if (tableNameBuilder_ == null) {
12063           if (((bitField0_ & 0x00000001) == 0x00000001) &&
12064               tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
12065             tableName_ =
12066               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
12067           } else {
12068             tableName_ = value;
12069           }
12070           onChanged();
12071         } else {
12072           tableNameBuilder_.mergeFrom(value);
12073         }
12074         bitField0_ |= 0x00000001;
12075         return this;
12076       }
12077       /**
12078        * <code>required .TableName tableName = 1;</code>
12079        */
clearTableName()12080       public Builder clearTableName() {
12081         if (tableNameBuilder_ == null) {
12082           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
12083           onChanged();
12084         } else {
12085           tableNameBuilder_.clear();
12086         }
12087         bitField0_ = (bitField0_ & ~0x00000001);
12088         return this;
12089       }
12090       /**
12091        * <code>required .TableName tableName = 1;</code>
12092        */
getTableNameBuilder()12093       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
12094         bitField0_ |= 0x00000001;
12095         onChanged();
12096         return getTableNameFieldBuilder().getBuilder();
12097       }
12098       /**
12099        * <code>required .TableName tableName = 1;</code>
12100        */
getTableNameOrBuilder()12101       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
12102         if (tableNameBuilder_ != null) {
12103           return tableNameBuilder_.getMessageOrBuilder();
12104         } else {
12105           return tableName_;
12106         }
12107       }
12108       /**
12109        * <code>required .TableName tableName = 1;</code>
12110        */
12111       private com.google.protobuf.SingleFieldBuilder<
12112           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder()12113           getTableNameFieldBuilder() {
12114         if (tableNameBuilder_ == null) {
12115           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12116               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
12117                   tableName_,
12118                   getParentForChildren(),
12119                   isClean());
12120           tableName_ = null;
12121         }
12122         return tableNameBuilder_;
12123       }
12124 
12125       // optional bool preserveSplits = 2 [default = false];
12126       private boolean preserveSplits_ ;
12127       /**
12128        * <code>optional bool preserveSplits = 2 [default = false];</code>
12129        */
hasPreserveSplits()12130       public boolean hasPreserveSplits() {
12131         return ((bitField0_ & 0x00000002) == 0x00000002);
12132       }
12133       /**
12134        * <code>optional bool preserveSplits = 2 [default = false];</code>
12135        */
getPreserveSplits()12136       public boolean getPreserveSplits() {
12137         return preserveSplits_;
12138       }
12139       /**
12140        * <code>optional bool preserveSplits = 2 [default = false];</code>
12141        */
setPreserveSplits(boolean value)12142       public Builder setPreserveSplits(boolean value) {
12143         bitField0_ |= 0x00000002;
12144         preserveSplits_ = value;
12145         onChanged();
12146         return this;
12147       }
12148       /**
12149        * <code>optional bool preserveSplits = 2 [default = false];</code>
12150        */
clearPreserveSplits()12151       public Builder clearPreserveSplits() {
12152         bitField0_ = (bitField0_ & ~0x00000002);
12153         preserveSplits_ = false;
12154         onChanged();
12155         return this;
12156       }
12157 
12158       // optional uint64 nonce_group = 3 [default = 0];
12159       private long nonceGroup_ ;
12160       /**
12161        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
12162        */
hasNonceGroup()12163       public boolean hasNonceGroup() {
12164         return ((bitField0_ & 0x00000004) == 0x00000004);
12165       }
12166       /**
12167        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
12168        */
getNonceGroup()12169       public long getNonceGroup() {
12170         return nonceGroup_;
12171       }
12172       /**
12173        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
12174        */
setNonceGroup(long value)12175       public Builder setNonceGroup(long value) {
12176         bitField0_ |= 0x00000004;
12177         nonceGroup_ = value;
12178         onChanged();
12179         return this;
12180       }
12181       /**
12182        * <code>optional uint64 nonce_group = 3 [default = 0];</code>
12183        */
clearNonceGroup()12184       public Builder clearNonceGroup() {
12185         bitField0_ = (bitField0_ & ~0x00000004);
12186         nonceGroup_ = 0L;
12187         onChanged();
12188         return this;
12189       }
12190 
12191       // optional uint64 nonce = 4 [default = 0];
12192       private long nonce_ ;
12193       /**
12194        * <code>optional uint64 nonce = 4 [default = 0];</code>
12195        */
hasNonce()12196       public boolean hasNonce() {
12197         return ((bitField0_ & 0x00000008) == 0x00000008);
12198       }
12199       /**
12200        * <code>optional uint64 nonce = 4 [default = 0];</code>
12201        */
getNonce()12202       public long getNonce() {
12203         return nonce_;
12204       }
12205       /**
12206        * <code>optional uint64 nonce = 4 [default = 0];</code>
12207        */
setNonce(long value)12208       public Builder setNonce(long value) {
12209         bitField0_ |= 0x00000008;
12210         nonce_ = value;
12211         onChanged();
12212         return this;
12213       }
12214       /**
12215        * <code>optional uint64 nonce = 4 [default = 0];</code>
12216        */
clearNonce()12217       public Builder clearNonce() {
12218         bitField0_ = (bitField0_ & ~0x00000008);
12219         nonce_ = 0L;
12220         onChanged();
12221         return this;
12222       }
12223 
12224       // @@protoc_insertion_point(builder_scope:TruncateTableRequest)
12225     }
12226 
12227     static {
12228       defaultInstance = new TruncateTableRequest(true);
defaultInstance.initFields()12229       defaultInstance.initFields();
12230     }
12231 
12232     // @@protoc_insertion_point(class_scope:TruncateTableRequest)
12233   }
12234 
12235   public interface TruncateTableResponseOrBuilder
12236       extends com.google.protobuf.MessageOrBuilder {
12237   }
12238   /**
12239    * Protobuf type {@code TruncateTableResponse}
12240    */
12241   public static final class TruncateTableResponse extends
12242       com.google.protobuf.GeneratedMessage
12243       implements TruncateTableResponseOrBuilder {
12244     // Use TruncateTableResponse.newBuilder() to construct.
TruncateTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)12245     private TruncateTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12246       super(builder);
12247       this.unknownFields = builder.getUnknownFields();
12248     }
TruncateTableResponse(boolean noInit)12249     private TruncateTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12250 
12251     private static final TruncateTableResponse defaultInstance;
getDefaultInstance()12252     public static TruncateTableResponse getDefaultInstance() {
12253       return defaultInstance;
12254     }
12255 
getDefaultInstanceForType()12256     public TruncateTableResponse getDefaultInstanceForType() {
12257       return defaultInstance;
12258     }
12259 
12260     private final com.google.protobuf.UnknownFieldSet unknownFields;
12261     @java.lang.Override
12262     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()12263         getUnknownFields() {
12264       return this.unknownFields;
12265     }
TruncateTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12266     private TruncateTableResponse(
12267         com.google.protobuf.CodedInputStream input,
12268         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12269         throws com.google.protobuf.InvalidProtocolBufferException {
12270       initFields();
12271       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12272           com.google.protobuf.UnknownFieldSet.newBuilder();
12273       try {
12274         boolean done = false;
12275         while (!done) {
12276           int tag = input.readTag();
12277           switch (tag) {
12278             case 0:
12279               done = true;
12280               break;
12281             default: {
12282               if (!parseUnknownField(input, unknownFields,
12283                                      extensionRegistry, tag)) {
12284                 done = true;
12285               }
12286               break;
12287             }
12288           }
12289         }
12290       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12291         throw e.setUnfinishedMessage(this);
12292       } catch (java.io.IOException e) {
12293         throw new com.google.protobuf.InvalidProtocolBufferException(
12294             e.getMessage()).setUnfinishedMessage(this);
12295       } finally {
12296         this.unknownFields = unknownFields.build();
12297         makeExtensionsImmutable();
12298       }
12299     }
12300     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()12301         getDescriptor() {
12302       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_descriptor;
12303     }
12304 
12305     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()12306         internalGetFieldAccessorTable() {
12307       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_fieldAccessorTable
12308           .ensureFieldAccessorsInitialized(
12309               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.Builder.class);
12310     }
12311 
12312     public static com.google.protobuf.Parser<TruncateTableResponse> PARSER =
12313         new com.google.protobuf.AbstractParser<TruncateTableResponse>() {
12314       public TruncateTableResponse parsePartialFrom(
12315           com.google.protobuf.CodedInputStream input,
12316           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12317           throws com.google.protobuf.InvalidProtocolBufferException {
12318         return new TruncateTableResponse(input, extensionRegistry);
12319       }
12320     };
12321 
12322     @java.lang.Override
getParserForType()12323     public com.google.protobuf.Parser<TruncateTableResponse> getParserForType() {
12324       return PARSER;
12325     }
12326 
initFields()12327     private void initFields() {
12328     }
12329     private byte memoizedIsInitialized = -1;
isInitialized()12330     public final boolean isInitialized() {
12331       byte isInitialized = memoizedIsInitialized;
12332       if (isInitialized != -1) return isInitialized == 1;
12333 
12334       memoizedIsInitialized = 1;
12335       return true;
12336     }
12337 
writeTo(com.google.protobuf.CodedOutputStream output)12338     public void writeTo(com.google.protobuf.CodedOutputStream output)
12339                         throws java.io.IOException {
12340       getSerializedSize();
12341       getUnknownFields().writeTo(output);
12342     }
12343 
12344     private int memoizedSerializedSize = -1;
getSerializedSize()12345     public int getSerializedSize() {
12346       int size = memoizedSerializedSize;
12347       if (size != -1) return size;
12348 
12349       size = 0;
12350       size += getUnknownFields().getSerializedSize();
12351       memoizedSerializedSize = size;
12352       return size;
12353     }
12354 
12355     private static final long serialVersionUID = 0L;
12356     @java.lang.Override
writeReplace()12357     protected java.lang.Object writeReplace()
12358         throws java.io.ObjectStreamException {
12359       return super.writeReplace();
12360     }
12361 
12362     @java.lang.Override
equals(final java.lang.Object obj)12363     public boolean equals(final java.lang.Object obj) {
12364       if (obj == this) {
12365        return true;
12366       }
12367       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse)) {
12368         return super.equals(obj);
12369       }
12370       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) obj;
12371 
12372       boolean result = true;
12373       result = result &&
12374           getUnknownFields().equals(other.getUnknownFields());
12375       return result;
12376     }
12377 
12378     private int memoizedHashCode = 0;
12379     @java.lang.Override
hashCode()12380     public int hashCode() {
12381       if (memoizedHashCode != 0) {
12382         return memoizedHashCode;
12383       }
12384       int hash = 41;
12385       hash = (19 * hash) + getDescriptorForType().hashCode();
12386       hash = (29 * hash) + getUnknownFields().hashCode();
12387       memoizedHashCode = hash;
12388       return hash;
12389     }
12390 
parseFrom( com.google.protobuf.ByteString data)12391     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(
12392         com.google.protobuf.ByteString data)
12393         throws com.google.protobuf.InvalidProtocolBufferException {
12394       return PARSER.parseFrom(data);
12395     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12396     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(
12397         com.google.protobuf.ByteString data,
12398         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12399         throws com.google.protobuf.InvalidProtocolBufferException {
12400       return PARSER.parseFrom(data, extensionRegistry);
12401     }
parseFrom(byte[] data)12402     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(byte[] data)
12403         throws com.google.protobuf.InvalidProtocolBufferException {
12404       return PARSER.parseFrom(data);
12405     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12406     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(
12407         byte[] data,
12408         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12409         throws com.google.protobuf.InvalidProtocolBufferException {
12410       return PARSER.parseFrom(data, extensionRegistry);
12411     }
parseFrom(java.io.InputStream input)12412     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(java.io.InputStream input)
12413         throws java.io.IOException {
12414       return PARSER.parseFrom(input);
12415     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12416     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(
12417         java.io.InputStream input,
12418         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12419         throws java.io.IOException {
12420       return PARSER.parseFrom(input, extensionRegistry);
12421     }
parseDelimitedFrom(java.io.InputStream input)12422     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseDelimitedFrom(java.io.InputStream input)
12423         throws java.io.IOException {
12424       return PARSER.parseDelimitedFrom(input);
12425     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12426     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseDelimitedFrom(
12427         java.io.InputStream input,
12428         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12429         throws java.io.IOException {
12430       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12431     }
parseFrom( com.google.protobuf.CodedInputStream input)12432     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(
12433         com.google.protobuf.CodedInputStream input)
12434         throws java.io.IOException {
12435       return PARSER.parseFrom(input);
12436     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12437     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parseFrom(
12438         com.google.protobuf.CodedInputStream input,
12439         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12440         throws java.io.IOException {
12441       return PARSER.parseFrom(input, extensionRegistry);
12442     }
12443 
newBuilder()12444     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()12445     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse prototype)12446     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse prototype) {
12447       return newBuilder().mergeFrom(prototype);
12448     }
toBuilder()12449     public Builder toBuilder() { return newBuilder(this); }
12450 
12451     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)12452     protected Builder newBuilderForType(
12453         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12454       Builder builder = new Builder(parent);
12455       return builder;
12456     }
12457     /**
12458      * Protobuf type {@code TruncateTableResponse}
12459      */
12460     public static final class Builder extends
12461         com.google.protobuf.GeneratedMessage.Builder<Builder>
12462        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponseOrBuilder {
12463       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()12464           getDescriptor() {
12465         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_descriptor;
12466       }
12467 
12468       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()12469           internalGetFieldAccessorTable() {
12470         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_fieldAccessorTable
12471             .ensureFieldAccessorsInitialized(
12472                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.Builder.class);
12473       }
12474 
12475       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.newBuilder()
Builder()12476       private Builder() {
12477         maybeForceBuilderInitialization();
12478       }
12479 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)12480       private Builder(
12481           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12482         super(parent);
12483         maybeForceBuilderInitialization();
12484       }
maybeForceBuilderInitialization()12485       private void maybeForceBuilderInitialization() {
12486         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12487         }
12488       }
create()12489       private static Builder create() {
12490         return new Builder();
12491       }
12492 
clear()12493       public Builder clear() {
12494         super.clear();
12495         return this;
12496       }
12497 
clone()12498       public Builder clone() {
12499         return create().mergeFrom(buildPartial());
12500       }
12501 
12502       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()12503           getDescriptorForType() {
12504         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_TruncateTableResponse_descriptor;
12505       }
12506 
getDefaultInstanceForType()12507       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse getDefaultInstanceForType() {
12508         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance();
12509       }
12510 
build()12511       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse build() {
12512         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse result = buildPartial();
12513         if (!result.isInitialized()) {
12514           throw newUninitializedMessageException(result);
12515         }
12516         return result;
12517       }
12518 
buildPartial()12519       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse buildPartial() {
12520         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse(this);
12521         onBuilt();
12522         return result;
12523       }
12524 
mergeFrom(com.google.protobuf.Message other)12525       public Builder mergeFrom(com.google.protobuf.Message other) {
12526         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) {
12527           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse)other);
12528         } else {
12529           super.mergeFrom(other);
12530           return this;
12531         }
12532       }
12533 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse other)12534       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse other) {
12535         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance()) return this;
12536         this.mergeUnknownFields(other.getUnknownFields());
12537         return this;
12538       }
12539 
isInitialized()12540       public final boolean isInitialized() {
12541         return true;
12542       }
12543 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12544       public Builder mergeFrom(
12545           com.google.protobuf.CodedInputStream input,
12546           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12547           throws java.io.IOException {
12548         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse parsedMessage = null;
12549         try {
12550           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12551         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12552           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) e.getUnfinishedMessage();
12553           throw e;
12554         } finally {
12555           if (parsedMessage != null) {
12556             mergeFrom(parsedMessage);
12557           }
12558         }
12559         return this;
12560       }
12561 
12562       // @@protoc_insertion_point(builder_scope:TruncateTableResponse)
12563     }
12564 
12565     static {
12566       defaultInstance = new TruncateTableResponse(true);
defaultInstance.initFields()12567       defaultInstance.initFields();
12568     }
12569 
12570     // @@protoc_insertion_point(class_scope:TruncateTableResponse)
12571   }
12572 
  /**
   * Read-only accessor interface for {@code EnableTableRequest}, implemented
   * by both the immutable message and its Builder.
   */
  public interface EnableTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // optional uint64 nonce_group = 2 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 3 [default = 0];
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    long getNonce();
  }
12610   /**
12611    * Protobuf type {@code EnableTableRequest}
12612    */
12613   public static final class EnableTableRequest extends
12614       com.google.protobuf.GeneratedMessage
12615       implements EnableTableRequestOrBuilder {
12616     // Use EnableTableRequest.newBuilder() to construct.
EnableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)12617     private EnableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12618       super(builder);
12619       this.unknownFields = builder.getUnknownFields();
12620     }
EnableTableRequest(boolean noInit)12621     private EnableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12622 
    // Singleton default instance, assigned in the class static initializer.
    private static final EnableTableRequest defaultInstance;
    public static EnableTableRequest getDefaultInstance() {
      return defaultInstance;
    }

    public EnableTableRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that were on the wire but not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
EnableTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12638     private EnableTableRequest(
12639         com.google.protobuf.CodedInputStream input,
12640         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12641         throws com.google.protobuf.InvalidProtocolBufferException {
12642       initFields();
12643       int mutable_bitField0_ = 0;
12644       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12645           com.google.protobuf.UnknownFieldSet.newBuilder();
12646       try {
12647         boolean done = false;
12648         while (!done) {
12649           int tag = input.readTag();
12650           switch (tag) {
12651             case 0:
12652               done = true;
12653               break;
12654             default: {
12655               if (!parseUnknownField(input, unknownFields,
12656                                      extensionRegistry, tag)) {
12657                 done = true;
12658               }
12659               break;
12660             }
12661             case 10: {
12662               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
12663               if (((bitField0_ & 0x00000001) == 0x00000001)) {
12664                 subBuilder = tableName_.toBuilder();
12665               }
12666               tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
12667               if (subBuilder != null) {
12668                 subBuilder.mergeFrom(tableName_);
12669                 tableName_ = subBuilder.buildPartial();
12670               }
12671               bitField0_ |= 0x00000001;
12672               break;
12673             }
12674             case 16: {
12675               bitField0_ |= 0x00000002;
12676               nonceGroup_ = input.readUInt64();
12677               break;
12678             }
12679             case 24: {
12680               bitField0_ |= 0x00000004;
12681               nonce_ = input.readUInt64();
12682               break;
12683             }
12684           }
12685         }
12686       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12687         throw e.setUnfinishedMessage(this);
12688       } catch (java.io.IOException e) {
12689         throw new com.google.protobuf.InvalidProtocolBufferException(
12690             e.getMessage()).setUnfinishedMessage(this);
12691       } finally {
12692         this.unknownFields = unknownFields.build();
12693         makeExtensionsImmutable();
12694       }
12695     }
    // Descriptor / reflection plumbing for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.Builder.class);
    }
12707 
    // Wire-format parser; delegates to the parsing constructor.
    // NOTE(review): left non-final by this protoc version's generator; do not
    // hand-edit to final — regenerate from Master.proto instead.
    public static com.google.protobuf.Parser<EnableTableRequest> PARSER =
        new com.google.protobuf.AbstractParser<EnableTableRequest>() {
      public EnableTableRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EnableTableRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EnableTableRequest> getParserForType() {
      return PARSER;
    }
12722 
    // Presence bits: 0x1 = table_name, 0x2 = nonce_group, 0x4 = nonce.
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // optional uint64 nonce_group = 2 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 2;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 3 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 3;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
12777 
initFields()12778     private void initFields() {
12779       tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
12780       nonceGroup_ = 0L;
12781       nonce_ = 0L;
12782     }
12783     private byte memoizedIsInitialized = -1;
isInitialized()12784     public final boolean isInitialized() {
12785       byte isInitialized = memoizedIsInitialized;
12786       if (isInitialized != -1) return isInitialized == 1;
12787 
12788       if (!hasTableName()) {
12789         memoizedIsInitialized = 0;
12790         return false;
12791       }
12792       if (!getTableName().isInitialized()) {
12793         memoizedIsInitialized = 0;
12794         return false;
12795       }
12796       memoizedIsInitialized = 1;
12797       return true;
12798     }
12799 
writeTo(com.google.protobuf.CodedOutputStream output)12800     public void writeTo(com.google.protobuf.CodedOutputStream output)
12801                         throws java.io.IOException {
12802       getSerializedSize();
12803       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12804         output.writeMessage(1, tableName_);
12805       }
12806       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12807         output.writeUInt64(2, nonceGroup_);
12808       }
12809       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12810         output.writeUInt64(3, nonce_);
12811       }
12812       getUnknownFields().writeTo(output);
12813     }
12814 
    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum the encoded size of each present field plus unknown fields.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
12837 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
12844 
    /**
     * Field-by-field equality: presence flags must match, present values must
     * be equal, and unknown-field sets must be equal.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
12875 
    // Cached hash; 0 means not yet computed (a computed hash of 0 would be
    // recomputed each call, which is benign).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mix descriptor, then (field number, value) for each present field.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
12900 
parseFrom( com.google.protobuf.ByteString data)12901     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(
12902         com.google.protobuf.ByteString data)
12903         throws com.google.protobuf.InvalidProtocolBufferException {
12904       return PARSER.parseFrom(data);
12905     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12906     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(
12907         com.google.protobuf.ByteString data,
12908         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12909         throws com.google.protobuf.InvalidProtocolBufferException {
12910       return PARSER.parseFrom(data, extensionRegistry);
12911     }
parseFrom(byte[] data)12912     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(byte[] data)
12913         throws com.google.protobuf.InvalidProtocolBufferException {
12914       return PARSER.parseFrom(data);
12915     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12916     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(
12917         byte[] data,
12918         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12919         throws com.google.protobuf.InvalidProtocolBufferException {
12920       return PARSER.parseFrom(data, extensionRegistry);
12921     }
parseFrom(java.io.InputStream input)12922     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(java.io.InputStream input)
12923         throws java.io.IOException {
12924       return PARSER.parseFrom(input);
12925     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12926     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(
12927         java.io.InputStream input,
12928         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12929         throws java.io.IOException {
12930       return PARSER.parseFrom(input, extensionRegistry);
12931     }
parseDelimitedFrom(java.io.InputStream input)12932     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom(java.io.InputStream input)
12933         throws java.io.IOException {
12934       return PARSER.parseDelimitedFrom(input);
12935     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12936     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseDelimitedFrom(
12937         java.io.InputStream input,
12938         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12939         throws java.io.IOException {
12940       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12941     }
parseFrom( com.google.protobuf.CodedInputStream input)12942     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(
12943         com.google.protobuf.CodedInputStream input)
12944         throws java.io.IOException {
12945       return PARSER.parseFrom(input);
12946     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12947     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parseFrom(
12948         com.google.protobuf.CodedInputStream input,
12949         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12950         throws java.io.IOException {
12951       return PARSER.parseFrom(input, extensionRegistry);
12952     }
12953 
newBuilder()12954     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()12955     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest prototype)12956     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest prototype) {
12957       return newBuilder().mergeFrom(prototype);
12958     }
toBuilder()12959     public Builder toBuilder() { return newBuilder(this); }
12960 
12961     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)12962     protected Builder newBuilderForType(
12963         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12964       Builder builder = new Builder(parent);
12965       return builder;
12966     }
12967     /**
12968      * Protobuf type {@code EnableTableRequest}
12969      */
12970     public static final class Builder extends
12971         com.google.protobuf.GeneratedMessage.Builder<Builder>
12972        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequestOrBuilder {
12973       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()12974           getDescriptor() {
12975         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor;
12976       }
12977 
12978       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()12979           internalGetFieldAccessorTable() {
12980         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_fieldAccessorTable
12981             .ensureFieldAccessorsInitialized(
12982                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.Builder.class);
12983       }
12984 
12985       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.newBuilder()
Builder()12986       private Builder() {
12987         maybeForceBuilderInitialization();
12988       }
12989 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)12990       private Builder(
12991           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12992         super(parent);
12993         maybeForceBuilderInitialization();
12994       }
maybeForceBuilderInitialization()12995       private void maybeForceBuilderInitialization() {
12996         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12997           getTableNameFieldBuilder();
12998         }
12999       }
create()13000       private static Builder create() {
13001         return new Builder();
13002       }
13003 
clear()13004       public Builder clear() {
13005         super.clear();
13006         if (tableNameBuilder_ == null) {
13007           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
13008         } else {
13009           tableNameBuilder_.clear();
13010         }
13011         bitField0_ = (bitField0_ & ~0x00000001);
13012         nonceGroup_ = 0L;
13013         bitField0_ = (bitField0_ & ~0x00000002);
13014         nonce_ = 0L;
13015         bitField0_ = (bitField0_ & ~0x00000004);
13016         return this;
13017       }
13018 
clone()13019       public Builder clone() {
13020         return create().mergeFrom(buildPartial());
13021       }
13022 
13023       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()13024           getDescriptorForType() {
13025         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableRequest_descriptor;
13026       }
13027 
getDefaultInstanceForType()13028       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest getDefaultInstanceForType() {
13029         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance();
13030       }
13031 
build()13032       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest build() {
13033         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = buildPartial();
13034         if (!result.isInitialized()) {
13035           throw newUninitializedMessageException(result);
13036         }
13037         return result;
13038       }
13039 
buildPartial()13040       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest buildPartial() {
13041         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest(this);
13042         int from_bitField0_ = bitField0_;
13043         int to_bitField0_ = 0;
13044         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13045           to_bitField0_ |= 0x00000001;
13046         }
13047         if (tableNameBuilder_ == null) {
13048           result.tableName_ = tableName_;
13049         } else {
13050           result.tableName_ = tableNameBuilder_.build();
13051         }
13052         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
13053           to_bitField0_ |= 0x00000002;
13054         }
13055         result.nonceGroup_ = nonceGroup_;
13056         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
13057           to_bitField0_ |= 0x00000004;
13058         }
13059         result.nonce_ = nonce_;
13060         result.bitField0_ = to_bitField0_;
13061         onBuilt();
13062         return result;
13063       }
13064 
mergeFrom(com.google.protobuf.Message other)13065       public Builder mergeFrom(com.google.protobuf.Message other) {
13066         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) {
13067           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)other);
13068         } else {
13069           super.mergeFrom(other);
13070           return this;
13071         }
13072       }
13073 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other)13074       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest other) {
13075         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance()) return this;
13076         if (other.hasTableName()) {
13077           mergeTableName(other.getTableName());
13078         }
13079         if (other.hasNonceGroup()) {
13080           setNonceGroup(other.getNonceGroup());
13081         }
13082         if (other.hasNonce()) {
13083           setNonce(other.getNonce());
13084         }
13085         this.mergeUnknownFields(other.getUnknownFields());
13086         return this;
13087       }
13088 
isInitialized()13089       public final boolean isInitialized() {
13090         if (!hasTableName()) {
13091 
13092           return false;
13093         }
13094         if (!getTableName().isInitialized()) {
13095 
13096           return false;
13097         }
13098         return true;
13099       }
13100 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13101       public Builder mergeFrom(
13102           com.google.protobuf.CodedInputStream input,
13103           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13104           throws java.io.IOException {
13105         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest parsedMessage = null;
13106         try {
13107           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13108         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13109           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest) e.getUnfinishedMessage();
13110           throw e;
13111         } finally {
13112           if (parsedMessage != null) {
13113             mergeFrom(parsedMessage);
13114           }
13115         }
13116         return this;
13117       }
13118       private int bitField0_;
13119 
13120       // required .TableName table_name = 1;
13121       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
13122       private com.google.protobuf.SingleFieldBuilder<
13123           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
13124       /**
13125        * <code>required .TableName table_name = 1;</code>
13126        */
hasTableName()13127       public boolean hasTableName() {
13128         return ((bitField0_ & 0x00000001) == 0x00000001);
13129       }
13130       /**
13131        * <code>required .TableName table_name = 1;</code>
13132        */
getTableName()13133       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
13134         if (tableNameBuilder_ == null) {
13135           return tableName_;
13136         } else {
13137           return tableNameBuilder_.getMessage();
13138         }
13139       }
13140       /**
13141        * <code>required .TableName table_name = 1;</code>
13142        */
setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)13143       public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
13144         if (tableNameBuilder_ == null) {
13145           if (value == null) {
13146             throw new NullPointerException();
13147           }
13148           tableName_ = value;
13149           onChanged();
13150         } else {
13151           tableNameBuilder_.setMessage(value);
13152         }
13153         bitField0_ |= 0x00000001;
13154         return this;
13155       }
13156       /**
13157        * <code>required .TableName table_name = 1;</code>
13158        */
setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)13159       public Builder setTableName(
13160           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
13161         if (tableNameBuilder_ == null) {
13162           tableName_ = builderForValue.build();
13163           onChanged();
13164         } else {
13165           tableNameBuilder_.setMessage(builderForValue.build());
13166         }
13167         bitField0_ |= 0x00000001;
13168         return this;
13169       }
13170       /**
13171        * <code>required .TableName table_name = 1;</code>
13172        */
mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)13173       public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
13174         if (tableNameBuilder_ == null) {
13175           if (((bitField0_ & 0x00000001) == 0x00000001) &&
13176               tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
13177             tableName_ =
13178               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
13179           } else {
13180             tableName_ = value;
13181           }
13182           onChanged();
13183         } else {
13184           tableNameBuilder_.mergeFrom(value);
13185         }
13186         bitField0_ |= 0x00000001;
13187         return this;
13188       }
13189       /**
13190        * <code>required .TableName table_name = 1;</code>
13191        */
clearTableName()13192       public Builder clearTableName() {
13193         if (tableNameBuilder_ == null) {
13194           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
13195           onChanged();
13196         } else {
13197           tableNameBuilder_.clear();
13198         }
13199         bitField0_ = (bitField0_ & ~0x00000001);
13200         return this;
13201       }
13202       /**
13203        * <code>required .TableName table_name = 1;</code>
13204        */
getTableNameBuilder()13205       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
13206         bitField0_ |= 0x00000001;
13207         onChanged();
13208         return getTableNameFieldBuilder().getBuilder();
13209       }
13210       /**
13211        * <code>required .TableName table_name = 1;</code>
13212        */
getTableNameOrBuilder()13213       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
13214         if (tableNameBuilder_ != null) {
13215           return tableNameBuilder_.getMessageOrBuilder();
13216         } else {
13217           return tableName_;
13218         }
13219       }
13220       /**
13221        * <code>required .TableName table_name = 1;</code>
13222        */
13223       private com.google.protobuf.SingleFieldBuilder<
13224           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder()13225           getTableNameFieldBuilder() {
13226         if (tableNameBuilder_ == null) {
13227           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13228               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
13229                   tableName_,
13230                   getParentForChildren(),
13231                   isClean());
13232           tableName_ = null;
13233         }
13234         return tableNameBuilder_;
13235       }
13236 
      // optional uint64 nonce_group = 2 [default = 0];
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       *
       * Presence is tracked via bit 0x00000002 of bitField0_.
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       *
       * Sets the field, marks it present, and notifies the parent builder.
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000002;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       *
       * Clears the presence bit and restores the proto default (0).
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }
13269 
      // optional uint64 nonce = 3 [default = 0];
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       *
       * Presence is tracked via bit 0x00000004 of bitField0_.
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       *
       * Sets the field, marks it present, and notifies the parent builder.
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000004;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       *
       * Clears the presence bit and restores the proto default (0).
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        onChanged();
        return this;
      }
13302 
13303       // @@protoc_insertion_point(builder_scope:EnableTableRequest)
13304     }
13305 
    static {
      // Eagerly build the singleton default instance with every field at
      // its proto-declared default value.
      defaultInstance = new EnableTableRequest(true);
      defaultInstance.initFields();
    }
13310 
13311     // @@protoc_insertion_point(class_scope:EnableTableRequest)
13312   }
13313 
  /**
   * Read-only accessor contract shared by {@code EnableTableResponse} and
   * its Builder.
   */
  public interface EnableTableResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint64 proc_id = 1;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    boolean hasProcId();
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    long getProcId();
  }
  /**
   * Protobuf type {@code EnableTableResponse}
   *
   * Immutable message carrying the id of the procedure started by an
   * EnableTable call.  Standard protobuf-java 2.x generated shape: singleton
   * default instance, eager-parsing constructor, memoized size/hash, and a
   * nested mutable Builder.
   */
  public static final class EnableTableResponse extends
      com.google.protobuf.GeneratedMessage
      implements EnableTableResponseOrBuilder {
    // Use EnableTableResponse.newBuilder() to construct.
    private EnableTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only for the static default instance.
    private EnableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final EnableTableResponse defaultInstance;
    public static EnableTableResponse getDefaultInstance() {
      return defaultInstance;
    }

    public EnableTableResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Eager-parsing constructor: reads the wire format tag-by-tag until
     * EOF (tag 0), stashing unrecognized fields in unknownFields.
     */
    private EnableTableResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {  // field 1 (proc_id), wire type varint
              bitField0_ |= 0x00000001;
              procId_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always seal unknown fields, even on a parse failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.Builder.class);
    }

    public static com.google.protobuf.Parser<EnableTableResponse> PARSER =
        new com.google.protobuf.AbstractParser<EnableTableResponse>() {
      public EnableTableResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EnableTableResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EnableTableResponse> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional uint64 proc_id = 1;
    public static final int PROC_ID_FIELD_NUMBER = 1;
    private long procId_;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     *
     * Presence is tracked via bit 0x00000001 of bitField0_.
     */
    public boolean hasProcId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 proc_id = 1;</code>
     */
    public long getProcId() {
      return procId_;
    }

    private void initFields() {
      procId_ = 0L;
    }
    // -1 = not computed yet; 0 = false; 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, procId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, procId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) obj;

      // Field-wise comparison: presence bits must match, and values must
      // match when present; unknown fields participate too.
      boolean result = true;
      result = result && (hasProcId() == other.hasProcId());
      if (hasProcId()) {
        result = result && (getProcId()
            == other.getProcId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcId()) {
        hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getProcId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code EnableTableResponse}
     *
     * Mutable builder; build()/buildPartial() snapshot its state into a
     * new immutable message.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets proc_id to its default and clears its presence bit. */
      public Builder clear() {
        super.clear();
        procId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableTableResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance();
      }

      /** Like buildPartial(), but throws if required fields are unset. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Copies the builder's fields and presence bits into a new message. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.procId_ = procId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges only the fields that are present in {@code other}. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance()) return this;
        if (other.hasProcId()) {
          setProcId(other.getProcId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream; on failure, merges whatever was parsed
       * before rethrowing so partial data is not lost.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional uint64 proc_id = 1;
      private long procId_ ;
      /**
       * <code>optional uint64 proc_id = 1;</code>
       *
       * Presence is tracked via bit 0x00000001 of bitField0_.
       */
      public boolean hasProcId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public long getProcId() {
        return procId_;
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public Builder setProcId(long value) {
        bitField0_ |= 0x00000001;
        procId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public Builder clearProcId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        procId_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:EnableTableResponse)
    }

    static {
      defaultInstance = new EnableTableResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:EnableTableResponse)
  }
13747 
  /**
   * Read-only accessor contract shared by {@code DisableTableRequest} and
   * its Builder.
   */
  public interface DisableTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // optional uint64 nonce_group = 2 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 3 [default = 0];
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    long getNonce();
  }
13785   /**
13786    * Protobuf type {@code DisableTableRequest}
13787    */
13788   public static final class DisableTableRequest extends
13789       com.google.protobuf.GeneratedMessage
13790       implements DisableTableRequestOrBuilder {
13791     // Use DisableTableRequest.newBuilder() to construct.
    // Builder-based constructor: used by Builder.buildPartial().
    private DisableTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only for the static default instance.
    private DisableTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13797 
    // Process-wide immutable singleton with all fields at proto defaults.
    private static final DisableTableRequest defaultInstance;
    public static DisableTableRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DisableTableRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
13806 
    // Fields on the wire that this generated version does not recognize;
    // preserved so reserialization round-trips.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Eager-parsing constructor: reads the wire format tag-by-tag until
     * EOF (tag 0), stashing unrecognized fields in unknownFields.
     */
    private DisableTableRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 (table_name), wire type length-delimited
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field seen before: merge per protobuf last-message-wins
                // semantics for repeated occurrences of a singular message.
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {  // field 2 (nonce_group), wire type varint
              bitField0_ |= 0x00000002;
              nonceGroup_ = input.readUInt64();
              break;
            }
            case 24: {  // field 3 (nonce), wire type varint
              bitField0_ |= 0x00000004;
              nonce_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always seal unknown fields, even on a parse failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor;
    }

    // Reflection support: maps descriptor fields to generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.Builder.class);
    }
13882 
    // Stateless parser delegating to the eager-parsing constructor.
    public static com.google.protobuf.Parser<DisableTableRequest> PARSER =
        new com.google.protobuf.AbstractParser<DisableTableRequest>() {
      public DisableTableRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DisableTableRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DisableTableRequest> getParserForType() {
      return PARSER;
    }
13897 
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * Presence is tracked via bit 0x00000001 of bitField0_.
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }
13920 
    // optional uint64 nonce_group = 2 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 2;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     *
     * Presence is tracked via bit 0x00000002 of bitField0_.
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 nonce_group = 2 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 3 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 3;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     *
     * Presence is tracked via bit 0x00000004 of bitField0_.
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce = 3 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
13952 
    // Resets every field to its proto-declared default value.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      nonceGroup_ = 0L;
      nonce_ = 0L;
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * Returns true when all required fields are present and themselves
     * initialized: table_name must be set and valid. Result is cached.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
13974 
    /**
     * Serializes this message to {@code output} in protobuf wire format.
     * Only fields whose presence bit is set are written.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Force the memoized size to be computed before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, nonce_);
      }
      // Round-trip unknown fields captured at parse time (forward compatibility).
      getUnknownFields().writeTo(output);
    }
13989 
    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /**
     * Computes (and caches) the exact number of bytes {@link #writeTo} will emit,
     * summing only the fields whose presence bit is set plus any unknown fields.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, nonceGroup_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, nonce_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
14012 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
14019 
    /**
     * Field-by-field equality: two requests are equal when each field has the
     * same presence state and (if present) the same value, and their unknown
     * field sets match.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
14050 
    // Cached hash code; 0 means not yet computed.
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with {@link #equals}: folds in the descriptor, each
     * present field (tagged by its field number), and the unknown fields.
     * Result is memoized since the message is immutable.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
14075 
    // Static parse entry points; all delegate to PARSER.
    /** Parses a {@code DisableTableRequest} from a ByteString. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    /** Parses from a ByteString, resolving extensions via {@code extensionRegistry}. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    /** Parses from a byte array. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    /** Parses from a byte array, resolving extensions via {@code extensionRegistry}. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    /** Parses one message from the stream, consuming it to EOF. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    /** Parses one message from the stream with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    /** Parses a length-prefixed (delimited) message from the stream. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    /** Parses a length-prefixed (delimited) message with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    /** Parses from an already-open CodedInputStream. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    /** Parses from a CodedInputStream with an extension registry. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
14128 
    /** Creates a fresh builder with all fields at their defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    /** Instance-side builder factory required by the Message interface. */
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized from this message. */
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent for nested-builder change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DisableTableRequest}
     *
     * <p>Mutable builder for {@code DisableTableRequest}. Accumulates field
     * values and presence bits, then produces an immutable message via
     * {@code build()}/{@code buildPartial()}. Not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires them.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      // Deep copy: rebuilds a new builder from the current partial state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if the required table_name field is
       * missing or uninitialized.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the required-field check, copying each field value and
       * translating the builder's presence bits into the message's bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic merge entry point: dispatches to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges each field of `other` that is present; absent fields are left untouched.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // True when the required table_name field is set and itself initialized.
      public final boolean isInitialized() {
        if (!hasTableName()) {

          return false;
        }
        if (!getTableName().isInitialized()) {

          return false;
        }
        return true;
      }

      /**
       * Parses from a stream and merges into this builder. On a parse error the
       * partially-parsed message (if any) is still merged before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder-side presence bitmap; copied into the message by buildPartial().
      private int bitField0_;

      // required .TableName table_name = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      // Lazily created nested builder; when non-null it owns the field value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       * Replaces table_name with {@code value}; rejects null.
       */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       * Replaces table_name with the built value of {@code builderForValue}.
       */
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       * Protobuf merge semantics: if a non-default value is already present,
       * merge {@code value} into it field-by-field; otherwise just take it.
       */
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       * Clears table_name back to its default and unsets its presence bit.
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       * Returns a mutable nested builder for table_name (marks the field present).
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {
          return tableName_;
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       * Lazily creates the nested builder; once created, ownership of the field
       * value moves into it and tableName_ is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }

      // optional uint64 nonce_group = 2 [default = 0];
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000002;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce_group = 2 [default = 0];</code>
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 nonce = 3 [default = 0];
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000004;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 3 [default = 0];</code>
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DisableTableRequest)
    }
14480 
    static {
      // Eagerly create the singleton default instance with all fields at defaults.
      defaultInstance = new DisableTableRequest(true);
      defaultInstance.initFields();
    }
14485 
14486     // @@protoc_insertion_point(class_scope:DisableTableRequest)
14487   }
14488 
  /**
   * Read-only accessor interface shared by {@code DisableTableResponse} and
   * its builder.
   */
  public interface DisableTableResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint64 proc_id = 1;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     * @return true if proc_id was explicitly set.
     */
    boolean hasProcId();
    /**
     * <code>optional uint64 proc_id = 1;</code>
     * @return the proc_id value, or 0 if unset.
     */
    long getProcId();
  }
14502   /**
14503    * Protobuf type {@code DisableTableResponse}
14504    */
14505   public static final class DisableTableResponse extends
14506       com.google.protobuf.GeneratedMessage
14507       implements DisableTableResponseOrBuilder {
14508     // Use DisableTableResponse.newBuilder() to construct.
    // Use DisableTableResponse.newBuilder() to construct.
    private DisableTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the singleton default instance; carries no unknown fields.
    private DisableTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14514 
    // Singleton default instance, created in the class's static initializer.
    private static final DisableTableResponse defaultInstance;
    /** Returns the shared immutable default instance (all fields at defaults). */
    public static DisableTableResponse getDefaultInstance() {
      return defaultInstance;
    }

    public DisableTableResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
14523 
    // Fields seen on the wire that this schema version does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0),
     * storing recognized fields and preserving unrecognized ones in
     * unknownFields. Note the {@code default:} arm lexically precedes
     * {@code case 8:}; switch dispatch is by tag value, so this is harmless
     * (standard generator output).
     */
    private DisableTableResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Declared by the generator; unused here (no repeated fields in this message).
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // proc_id = 1, wire type 0 (varint).
              bitField0_ |= 0x00000001;
              procId_ = input.readUInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the message's type descriptor (schema metadata). */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor;
    }

    // Binds generated field accessors to this class and its Builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.Builder.class);
    }
14581 
    // Shared parser; delegates to the wire-format parsing constructor.
    // NOTE(review): generator emits this as a non-final public static field;
    // it is never reassigned here, but callers could — a known quirk of
    // protobuf 2.5 generated code.
    public static com.google.protobuf.Parser<DisableTableResponse> PARSER =
        new com.google.protobuf.AbstractParser<DisableTableResponse>() {
      public DisableTableResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DisableTableResponse(input, extensionRegistry);
      }
    };

    /** Returns the {@code Parser} used to decode {@code DisableTableResponse} messages. */
    @java.lang.Override
    public com.google.protobuf.Parser<DisableTableResponse> getParserForType() {
      return PARSER;
    }
14596 
    // Presence bitmap: bit 0 = proc_id.
    private int bitField0_;
    // optional uint64 proc_id = 1;
    public static final int PROC_ID_FIELD_NUMBER = 1;
    private long procId_;
    /**
     * <code>optional uint64 proc_id = 1;</code>
     * @return true if proc_id was explicitly set (presence bit 0).
     */
    public boolean hasProcId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint64 proc_id = 1;</code>
     * @return the procedure id, or 0 if unset.
     */
    public long getProcId() {
      return procId_;
    }
14613 
    // Resets every field to its proto-declared default value.
    private void initFields() {
      procId_ = 0L;
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // Always true: this message declares no required fields.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
14625 
writeTo(com.google.protobuf.CodedOutputStream output)14626     public void writeTo(com.google.protobuf.CodedOutputStream output)
14627                         throws java.io.IOException {
14628       getSerializedSize();
14629       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14630         output.writeUInt64(1, procId_);
14631       }
14632       getUnknownFields().writeTo(output);
14633     }
14634 
14635     private int memoizedSerializedSize = -1;
getSerializedSize()14636     public int getSerializedSize() {
14637       int size = memoizedSerializedSize;
14638       if (size != -1) return size;
14639 
14640       size = 0;
14641       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14642         size += com.google.protobuf.CodedOutputStream
14643           .computeUInt64Size(1, procId_);
14644       }
14645       size += getUnknownFields().getSerializedSize();
14646       memoizedSerializedSize = size;
14647       return size;
14648     }
14649 
    private static final long serialVersionUID = 0L;
    /** Java serialization hook; defers to GeneratedMessage's proto-based replacement. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
14656 
    /**
     * Value equality: same presence and value of proc_id, plus equal unknown
     * fields. Non-DisableTableResponse objects fall back to identity equality
     * via {@code super.equals}.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) obj;

      boolean result = true;
      result = result && (hasProcId() == other.hasProcId());
      if (hasProcId()) {
        result = result && (getProcId()
            == other.getProcId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 = not computed yet (benign racy cache, consistent with equals()).
    private int memoizedHashCode = 0;
    /** Hash mixes the descriptor, each set field (number + value), and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcId()) {
        hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getProcId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
14694 
parseFrom( com.google.protobuf.ByteString data)14695     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(
14696         com.google.protobuf.ByteString data)
14697         throws com.google.protobuf.InvalidProtocolBufferException {
14698       return PARSER.parseFrom(data);
14699     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14700     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(
14701         com.google.protobuf.ByteString data,
14702         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14703         throws com.google.protobuf.InvalidProtocolBufferException {
14704       return PARSER.parseFrom(data, extensionRegistry);
14705     }
parseFrom(byte[] data)14706     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(byte[] data)
14707         throws com.google.protobuf.InvalidProtocolBufferException {
14708       return PARSER.parseFrom(data);
14709     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14710     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(
14711         byte[] data,
14712         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14713         throws com.google.protobuf.InvalidProtocolBufferException {
14714       return PARSER.parseFrom(data, extensionRegistry);
14715     }
parseFrom(java.io.InputStream input)14716     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(java.io.InputStream input)
14717         throws java.io.IOException {
14718       return PARSER.parseFrom(input);
14719     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14720     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(
14721         java.io.InputStream input,
14722         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14723         throws java.io.IOException {
14724       return PARSER.parseFrom(input, extensionRegistry);
14725     }
parseDelimitedFrom(java.io.InputStream input)14726     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom(java.io.InputStream input)
14727         throws java.io.IOException {
14728       return PARSER.parseDelimitedFrom(input);
14729     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14730     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseDelimitedFrom(
14731         java.io.InputStream input,
14732         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14733         throws java.io.IOException {
14734       return PARSER.parseDelimitedFrom(input, extensionRegistry);
14735     }
parseFrom( com.google.protobuf.CodedInputStream input)14736     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(
14737         com.google.protobuf.CodedInputStream input)
14738         throws java.io.IOException {
14739       return PARSER.parseFrom(input);
14740     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14741     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parseFrom(
14742         com.google.protobuf.CodedInputStream input,
14743         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14744         throws java.io.IOException {
14745       return PARSER.parseFrom(input, extensionRegistry);
14746     }
14747 
newBuilder()14748     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()14749     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse prototype)14750     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse prototype) {
14751       return newBuilder().mergeFrom(prototype);
14752     }
toBuilder()14753     public Builder toBuilder() { return newBuilder(this); }
14754 
14755     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)14756     protected Builder newBuilderForType(
14757         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14758       Builder builder = new Builder(parent);
14759       return builder;
14760     }
    /**
     * Protobuf type {@code DisableTableResponse}
     *
     * <p>Mutable builder: accumulates field values and presence bits, then
     * {@link #build()} / {@link #buildPartial()} snapshot them into an
     * immutable message.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets all fields and presence bits to defaults. */
      public Builder clear() {
        super.clear();
        procId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      /** Deep copy via round-tripping through a partial message. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DisableTableResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance();
      }

      /** Builds the message, throwing if required fields are missing (none here). */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Snapshots the builder state into a message without initialization checks. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.procId_ = procId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      /** Type-dispatching merge; falls back to reflective merge for foreign messages. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Copies set fields from {@code other} into this builder (last writer wins). */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance()) return this;
        if (other.hasProcId()) {
          setProcId(other.getProcId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream, merging whatever was read even when parsing
       * fails part-way (hence the merge in {@code finally}).
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional uint64 proc_id = 1;
      private long procId_ ;
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public boolean hasProcId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public long getProcId() {
        return procId_;
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public Builder setProcId(long value) {
        bitField0_ |= 0x00000001;
        procId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 proc_id = 1;</code>
       */
      public Builder clearProcId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        procId_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DisableTableResponse)
    }
14914 
    // Eagerly create the shared default instance with all fields at proto defaults.
    static {
      defaultInstance = new DisableTableResponse(true);
      defaultInstance.initFields();
    }
14919 
14920     // @@protoc_insertion_point(class_scope:DisableTableResponse)
14921   }
14922 
  /** Read-only accessor interface shared by {@code ModifyTableRequest} and its builder (generated). */
  public interface ModifyTableRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // required .TableSchema table_schema = 2;
    /**
     * <code>required .TableSchema table_schema = 2;</code>
     */
    boolean hasTableSchema();
    /**
     * <code>required .TableSchema table_schema = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema();
    /**
     * <code>required .TableSchema table_schema = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder();

    // optional uint64 nonce_group = 3 [default = 0];
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    boolean hasNonceGroup();
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    long getNonceGroup();

    // optional uint64 nonce = 4 [default = 0];
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    boolean hasNonce();
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    long getNonce();
  }
14974   /**
14975    * Protobuf type {@code ModifyTableRequest}
14976    */
14977   public static final class ModifyTableRequest extends
14978       com.google.protobuf.GeneratedMessage
14979       implements ModifyTableRequestOrBuilder {
14980     // Use ModifyTableRequest.newBuilder() to construct.
ModifyTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)14981     private ModifyTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
14982       super(builder);
14983       this.unknownFields = builder.getUnknownFields();
14984     }
ModifyTableRequest(boolean noInit)14985     private ModifyTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14986 
14987     private static final ModifyTableRequest defaultInstance;
getDefaultInstance()14988     public static ModifyTableRequest getDefaultInstance() {
14989       return defaultInstance;
14990     }
14991 
getDefaultInstanceForType()14992     public ModifyTableRequest getDefaultInstanceForType() {
14993       return defaultInstance;
14994     }
14995 
14996     private final com.google.protobuf.UnknownFieldSet unknownFields;
14997     @java.lang.Override
14998     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()14999         getUnknownFields() {
15000       return this.unknownFields;
15001     }
ModifyTableRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15002     private ModifyTableRequest(
15003         com.google.protobuf.CodedInputStream input,
15004         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15005         throws com.google.protobuf.InvalidProtocolBufferException {
15006       initFields();
15007       int mutable_bitField0_ = 0;
15008       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15009           com.google.protobuf.UnknownFieldSet.newBuilder();
15010       try {
15011         boolean done = false;
15012         while (!done) {
15013           int tag = input.readTag();
15014           switch (tag) {
15015             case 0:
15016               done = true;
15017               break;
15018             default: {
15019               if (!parseUnknownField(input, unknownFields,
15020                                      extensionRegistry, tag)) {
15021                 done = true;
15022               }
15023               break;
15024             }
15025             case 10: {
15026               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
15027               if (((bitField0_ & 0x00000001) == 0x00000001)) {
15028                 subBuilder = tableName_.toBuilder();
15029               }
15030               tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
15031               if (subBuilder != null) {
15032                 subBuilder.mergeFrom(tableName_);
15033                 tableName_ = subBuilder.buildPartial();
15034               }
15035               bitField0_ |= 0x00000001;
15036               break;
15037             }
15038             case 18: {
15039               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null;
15040               if (((bitField0_ & 0x00000002) == 0x00000002)) {
15041                 subBuilder = tableSchema_.toBuilder();
15042               }
15043               tableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry);
15044               if (subBuilder != null) {
15045                 subBuilder.mergeFrom(tableSchema_);
15046                 tableSchema_ = subBuilder.buildPartial();
15047               }
15048               bitField0_ |= 0x00000002;
15049               break;
15050             }
15051             case 24: {
15052               bitField0_ |= 0x00000004;
15053               nonceGroup_ = input.readUInt64();
15054               break;
15055             }
15056             case 32: {
15057               bitField0_ |= 0x00000008;
15058               nonce_ = input.readUInt64();
15059               break;
15060             }
15061           }
15062         }
15063       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15064         throw e.setUnfinishedMessage(this);
15065       } catch (java.io.IOException e) {
15066         throw new com.google.protobuf.InvalidProtocolBufferException(
15067             e.getMessage()).setUnfinishedMessage(this);
15068       } finally {
15069         this.unknownFields = unknownFields.build();
15070         makeExtensionsImmutable();
15071       }
15072     }
    /** Descriptor for the {@code ModifyTableRequest} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor;
    }

    /** Binds the {@code ModifyTableRequest} descriptor to its reflective field accessors. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.Builder.class);
    }
15084 
15085     public static com.google.protobuf.Parser<ModifyTableRequest> PARSER =
15086         new com.google.protobuf.AbstractParser<ModifyTableRequest>() {
15087       public ModifyTableRequest parsePartialFrom(
15088           com.google.protobuf.CodedInputStream input,
15089           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15090           throws com.google.protobuf.InvalidProtocolBufferException {
15091         return new ModifyTableRequest(input, extensionRegistry);
15092       }
15093     };
15094 
15095     @java.lang.Override
getParserForType()15096     public com.google.protobuf.Parser<ModifyTableRequest> getParserForType() {
15097       return PARSER;
15098     }
15099 
    // Presence bits: 0x1 table_name, 0x2 table_schema, 0x4 nonce_group, 0x8 nonce.
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     * @return table_name, or its default instance when unset.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // required .TableSchema table_schema = 2;
    public static final int TABLE_SCHEMA_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_;
    /**
     * <code>required .TableSchema table_schema = 2;</code>
     */
    public boolean hasTableSchema() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .TableSchema table_schema = 2;</code>
     * @return table_schema, or its default instance when unset.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
      return tableSchema_;
    }
    /**
     * <code>required .TableSchema table_schema = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
      return tableSchema_;
    }

    // optional uint64 nonce_group = 3 [default = 0];
    public static final int NONCE_GROUP_FIELD_NUMBER = 3;
    private long nonceGroup_;
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public boolean hasNonceGroup() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 nonce_group = 3 [default = 0];</code>
     */
    public long getNonceGroup() {
      return nonceGroup_;
    }

    // optional uint64 nonce = 4 [default = 0];
    public static final int NONCE_FIELD_NUMBER = 4;
    private long nonce_;
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public boolean hasNonce() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional uint64 nonce = 4 [default = 0];</code>
     */
    public long getNonce() {
      return nonce_;
    }
15176 
initFields()15177     private void initFields() {
15178       tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
15179       tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
15180       nonceGroup_ = 0L;
15181       nonce_ = 0L;
15182     }
15183     private byte memoizedIsInitialized = -1;
isInitialized()15184     public final boolean isInitialized() {
15185       byte isInitialized = memoizedIsInitialized;
15186       if (isInitialized != -1) return isInitialized == 1;
15187 
15188       if (!hasTableName()) {
15189         memoizedIsInitialized = 0;
15190         return false;
15191       }
15192       if (!hasTableSchema()) {
15193         memoizedIsInitialized = 0;
15194         return false;
15195       }
15196       if (!getTableName().isInitialized()) {
15197         memoizedIsInitialized = 0;
15198         return false;
15199       }
15200       if (!getTableSchema().isInitialized()) {
15201         memoizedIsInitialized = 0;
15202         return false;
15203       }
15204       memoizedIsInitialized = 1;
15205       return true;
15206     }
15207 
writeTo(com.google.protobuf.CodedOutputStream output)15208     public void writeTo(com.google.protobuf.CodedOutputStream output)
15209                         throws java.io.IOException {
15210       getSerializedSize();
15211       if (((bitField0_ & 0x00000001) == 0x00000001)) {
15212         output.writeMessage(1, tableName_);
15213       }
15214       if (((bitField0_ & 0x00000002) == 0x00000002)) {
15215         output.writeMessage(2, tableSchema_);
15216       }
15217       if (((bitField0_ & 0x00000004) == 0x00000004)) {
15218         output.writeUInt64(3, nonceGroup_);
15219       }
15220       if (((bitField0_ & 0x00000008) == 0x00000008)) {
15221         output.writeUInt64(4, nonce_);
15222       }
15223       getUnknownFields().writeTo(output);
15224     }
15225 
15226     private int memoizedSerializedSize = -1;
getSerializedSize()15227     public int getSerializedSize() {
15228       int size = memoizedSerializedSize;
15229       if (size != -1) return size;
15230 
15231       size = 0;
15232       if (((bitField0_ & 0x00000001) == 0x00000001)) {
15233         size += com.google.protobuf.CodedOutputStream
15234           .computeMessageSize(1, tableName_);
15235       }
15236       if (((bitField0_ & 0x00000002) == 0x00000002)) {
15237         size += com.google.protobuf.CodedOutputStream
15238           .computeMessageSize(2, tableSchema_);
15239       }
15240       if (((bitField0_ & 0x00000004) == 0x00000004)) {
15241         size += com.google.protobuf.CodedOutputStream
15242           .computeUInt64Size(3, nonceGroup_);
15243       }
15244       if (((bitField0_ & 0x00000008) == 0x00000008)) {
15245         size += com.google.protobuf.CodedOutputStream
15246           .computeUInt64Size(4, nonce_);
15247       }
15248       size += getUnknownFields().getSerializedSize();
15249       memoizedSerializedSize = size;
15250       return size;
15251     }
15252 
    private static final long serialVersionUID = 0L;
    /** Java serialization hook; defers to GeneratedMessage's proto-based replacement. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
15259 
    /**
     * Value equality: same presence and value for each of the four fields,
     * plus equal unknown fields. Non-ModifyTableRequest objects fall back to
     * identity equality via {@code super.equals}.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasTableSchema() == other.hasTableSchema());
      if (hasTableSchema()) {
        result = result && getTableSchema()
            .equals(other.getTableSchema());
      }
      result = result && (hasNonceGroup() == other.hasNonceGroup());
      if (hasNonceGroup()) {
        result = result && (getNonceGroup()
            == other.getNonceGroup());
      }
      result = result && (hasNonce() == other.hasNonce());
      if (hasNonce()) {
        result = result && (getNonce()
            == other.getNonce());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 = not computed yet (benign racy cache, consistent with equals()).
    private int memoizedHashCode = 0;
    /** Hash mixes the descriptor, each set field (number + value), and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasTableSchema()) {
        hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER;
        hash = (53 * hash) + getTableSchema().hashCode();
      }
      if (hasNonceGroup()) {
        hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonceGroup());
      }
      if (hasNonce()) {
        hash = (37 * hash) + NONCE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNonce());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
15324 
parseFrom( com.google.protobuf.ByteString data)15325     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(
15326         com.google.protobuf.ByteString data)
15327         throws com.google.protobuf.InvalidProtocolBufferException {
15328       return PARSER.parseFrom(data);
15329     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15330     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(
15331         com.google.protobuf.ByteString data,
15332         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15333         throws com.google.protobuf.InvalidProtocolBufferException {
15334       return PARSER.parseFrom(data, extensionRegistry);
15335     }
parseFrom(byte[] data)15336     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(byte[] data)
15337         throws com.google.protobuf.InvalidProtocolBufferException {
15338       return PARSER.parseFrom(data);
15339     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15340     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(
15341         byte[] data,
15342         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15343         throws com.google.protobuf.InvalidProtocolBufferException {
15344       return PARSER.parseFrom(data, extensionRegistry);
15345     }
parseFrom(java.io.InputStream input)15346     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(java.io.InputStream input)
15347         throws java.io.IOException {
15348       return PARSER.parseFrom(input);
15349     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15350     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(
15351         java.io.InputStream input,
15352         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15353         throws java.io.IOException {
15354       return PARSER.parseFrom(input, extensionRegistry);
15355     }
parseDelimitedFrom(java.io.InputStream input)15356     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom(java.io.InputStream input)
15357         throws java.io.IOException {
15358       return PARSER.parseDelimitedFrom(input);
15359     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15360     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseDelimitedFrom(
15361         java.io.InputStream input,
15362         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15363         throws java.io.IOException {
15364       return PARSER.parseDelimitedFrom(input, extensionRegistry);
15365     }
parseFrom( com.google.protobuf.CodedInputStream input)15366     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(
15367         com.google.protobuf.CodedInputStream input)
15368         throws java.io.IOException {
15369       return PARSER.parseFrom(input);
15370     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15371     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parseFrom(
15372         com.google.protobuf.CodedInputStream input,
15373         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15374         throws java.io.IOException {
15375       return PARSER.parseFrom(input, extensionRegistry);
15376     }
15377 
newBuilder()15378     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()15379     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest prototype)15380     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest prototype) {
15381       return newBuilder().mergeFrom(prototype);
15382     }
toBuilder()15383     public Builder toBuilder() { return newBuilder(this); }
15384 
15385     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)15386     protected Builder newBuilderForType(
15387         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15388       Builder builder = new Builder(parent);
15389       return builder;
15390     }
15391     /**
15392      * Protobuf type {@code ModifyTableRequest}
15393      */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequestOrBuilder {
      // NOTE(review): generated builder for ModifyTableRequest — regenerate
      // from Master.proto rather than editing by hand.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builders when the protobuf
      // runtime asks for it (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
          getTableSchemaFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default value and clears all presence bits.
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (tableSchemaBuilder_ == null) {
          tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
        } else {
          tableSchemaBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        nonceGroup_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        nonce_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state (values plus presence bits) into a new
      // message without checking required-field initialization.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (tableSchemaBuilder_ == null) {
          result.tableSchema_ = tableSchema_;
        } else {
          result.tableSchema_ = tableSchemaBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.nonceGroup_ = nonceGroup_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.nonce_ = nonce_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields present in `other` overwrite/merge
      // into this builder; merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance()) return this;
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasTableSchema()) {
          mergeTableSchema(other.getTableSchema());
        }
        if (other.hasNonceGroup()) {
          setNonceGroup(other.getNonceGroup());
        }
        if (other.hasNonce()) {
          setNonce(other.getNonce());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Both table_name and table_schema are `required` in Master.proto, and
      // each must itself be fully initialized.
      public final boolean isInitialized() {
        if (!hasTableName()) {

          return false;
        }
        if (!hasTableSchema()) {

          return false;
        }
        if (!getTableName().isInitialized()) {

          return false;
        }
        if (!getTableSchema().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from the stream via PARSER; on failure, whatever was parsed
      // before the error is still merged in (finally block) before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: 0x1=table_name, 0x2=table_schema,
      // 0x4=nonce_group, 0x8=nonce.
      private int bitField0_;

      // required .TableName table_name = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          // Merge into any existing non-default value; otherwise replace.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {
          return tableName_;
        }
      }
      /**
       * <code>required .TableName table_name = 1;</code>
       */
      // Lazily creates the nested builder; once created, it owns the field
      // value and tableName_ is nulled out.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }

      // required .TableSchema table_schema = 2;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public boolean hasTableSchema() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema() {
        if (tableSchemaBuilder_ == null) {
          return tableSchema_;
        } else {
          return tableSchemaBuilder_.getMessage();
        }
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public Builder setTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableSchema_ = value;
          onChanged();
        } else {
          tableSchemaBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public Builder setTableSchema(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          tableSchema_ = builderForValue.build();
          onChanged();
        } else {
          tableSchemaBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public Builder mergeTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          // Merge into any existing non-default value; otherwise replace.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              tableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) {
            tableSchema_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(tableSchema_).mergeFrom(value).buildPartial();
          } else {
            tableSchema_ = value;
          }
          onChanged();
        } else {
          tableSchemaBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public Builder clearTableSchema() {
        if (tableSchemaBuilder_ == null) {
          tableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
          onChanged();
        } else {
          tableSchemaBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getTableSchemaFieldBuilder().getBuilder();
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder() {
        if (tableSchemaBuilder_ != null) {
          return tableSchemaBuilder_.getMessageOrBuilder();
        } else {
          return tableSchema_;
        }
      }
      /**
       * <code>required .TableSchema table_schema = 2;</code>
       */
      // Lazily creates the nested builder; once created, it owns the field
      // value and tableSchema_ is nulled out.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
          getTableSchemaFieldBuilder() {
        if (tableSchemaBuilder_ == null) {
          tableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
                  tableSchema_,
                  getParentForChildren(),
                  isClean());
          tableSchema_ = null;
        }
        return tableSchemaBuilder_;
      }

      // optional uint64 nonce_group = 3 [default = 0];
      private long nonceGroup_ ;
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public boolean hasNonceGroup() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public long getNonceGroup() {
        return nonceGroup_;
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public Builder setNonceGroup(long value) {
        bitField0_ |= 0x00000004;
        nonceGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce_group = 3 [default = 0];</code>
       */
      public Builder clearNonceGroup() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nonceGroup_ = 0L;
        onChanged();
        return this;
      }

      // optional uint64 nonce = 4 [default = 0];
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000008;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 4 [default = 0];</code>
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000008);
        nonce_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ModifyTableRequest)
    }
15872 
    // Class-load-time creation of the shared default instance; the no-descriptor
    // constructor avoids static-initialization ordering problems.
    static {
      defaultInstance = new ModifyTableRequest(true);
      defaultInstance.initFields();
    }
15877 
15878     // @@protoc_insertion_point(class_scope:ModifyTableRequest)
15879   }
15880 
  // ModifyTableResponse declares no fields, so its OrBuilder interface adds
  // nothing beyond the base MessageOrBuilder contract.
  public interface ModifyTableResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
15884   /**
15885    * Protobuf type {@code ModifyTableResponse}
15886    */
15887   public static final class ModifyTableResponse extends
15888       com.google.protobuf.GeneratedMessage
15889       implements ModifyTableResponseOrBuilder {
    // Use ModifyTableResponse.newBuilder() to construct.
    private ModifyTableResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create defaultInstance; `noInit` is just a disambiguating marker.
    private ModifyTableResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15896 
    // Shared immutable default instance, assigned in the static initializer.
    private static final ModifyTableResponse defaultInstance;
    public static ModifyTableResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ModifyTableResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
15905 
    // Fields read off the wire that this message version does not recognize;
    // preserved so they can be re-serialized losslessly.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
ModifyTableResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15912     private ModifyTableResponse(
15913         com.google.protobuf.CodedInputStream input,
15914         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15915         throws com.google.protobuf.InvalidProtocolBufferException {
15916       initFields();
15917       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15918           com.google.protobuf.UnknownFieldSet.newBuilder();
15919       try {
15920         boolean done = false;
15921         while (!done) {
15922           int tag = input.readTag();
15923           switch (tag) {
15924             case 0:
15925               done = true;
15926               break;
15927             default: {
15928               if (!parseUnknownField(input, unknownFields,
15929                                      extensionRegistry, tag)) {
15930                 done = true;
15931               }
15932               break;
15933             }
15934           }
15935         }
15936       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15937         throw e.setUnfinishedMessage(this);
15938       } catch (java.io.IOException e) {
15939         throw new com.google.protobuf.InvalidProtocolBufferException(
15940             e.getMessage()).setUnfinishedMessage(this);
15941       } finally {
15942         this.unknownFields = unknownFields.build();
15943         makeExtensionsImmutable();
15944       }
15945     }
    // Descriptor and reflection accessors wired to the statics declared at
    // the bottom of MasterProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.Builder.class);
    }
15957 
    // Shared parser that delegates to the parsing constructor.
    // NOTE(review): field is public static but not final — generated this way
    // by protoc 2.5; callers should treat it as read-only.
    public static com.google.protobuf.Parser<ModifyTableResponse> PARSER =
        new com.google.protobuf.AbstractParser<ModifyTableResponse>() {
      public ModifyTableResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ModifyTableResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ModifyTableResponse> getParserForType() {
      return PARSER;
    }
15972 
    // No fields to initialize for this empty message.
    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
15983 
    // Serializes the message; only unknown fields can carry data here.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures size is memoized before writing
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
16000 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
16007 
    // Equality: same type and equal unknown fields (no declared fields).
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
16023 
    // Memoized hash; 0 doubles as the "not computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
16036 
    // Static parse entry points; all delegate to PARSER for each supported
    // input form (ByteString, byte[], InputStream, CodedInputStream), with
    // and without an extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
16089 
    // Builder factories: fresh builder, builder seeded from a prototype,
    // and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
16103     /**
16104      * Protobuf type {@code ModifyTableResponse}
16105      */
    // Builder for ModifyTableResponse. The message has no declared fields,
    // so the builder only manages unknown fields.
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No field builders to force-initialize for this empty message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      // Deep copy via a partial build of the current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyTableResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance();
      }

      // build() enforces required-field initialization; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse(this);
        onBuilt();
        return result;
      }

      // Typed merge dispatch: handles the generic Message overload.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Stream merge: on parse failure, keep any partially parsed data.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ModifyTableResponse)
    }
16210 
    // Eagerly create the shared default instance for getDefaultInstance().
    static {
      defaultInstance = new ModifyTableResponse(true);
      defaultInstance.initFields();
    }
16215 
16216     // @@protoc_insertion_point(class_scope:ModifyTableResponse)
16217   }
16218 
  // Read-only view over CreateNamespaceRequest, implemented by both the
  // message and its Builder.
  public interface CreateNamespaceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .NamespaceDescriptor namespaceDescriptor = 1;
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    boolean hasNamespaceDescriptor();
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor();
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder();
  }
16236   /**
16237    * Protobuf type {@code CreateNamespaceRequest}
16238    */
16239   public static final class CreateNamespaceRequest extends
16240       com.google.protobuf.GeneratedMessage
16241       implements CreateNamespaceRequestOrBuilder {
16242     // Use CreateNamespaceRequest.newBuilder() to construct.
    // Use CreateNamespaceRequest.newBuilder() to construct.
    private CreateNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor used only for the shared default instance.
    private CreateNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16248 
    // Shared immutable default instance, created in the static initializer.
    private static final CreateNamespaceRequest defaultInstance;
    public static CreateNamespaceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public CreateNamespaceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
16257 
    // Fields seen on the wire that this message version does not declare.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
CreateNamespaceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16264     private CreateNamespaceRequest(
16265         com.google.protobuf.CodedInputStream input,
16266         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16267         throws com.google.protobuf.InvalidProtocolBufferException {
16268       initFields();
16269       int mutable_bitField0_ = 0;
16270       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
16271           com.google.protobuf.UnknownFieldSet.newBuilder();
16272       try {
16273         boolean done = false;
16274         while (!done) {
16275           int tag = input.readTag();
16276           switch (tag) {
16277             case 0:
16278               done = true;
16279               break;
16280             default: {
16281               if (!parseUnknownField(input, unknownFields,
16282                                      extensionRegistry, tag)) {
16283                 done = true;
16284               }
16285               break;
16286             }
16287             case 10: {
16288               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder subBuilder = null;
16289               if (((bitField0_ & 0x00000001) == 0x00000001)) {
16290                 subBuilder = namespaceDescriptor_.toBuilder();
16291               }
16292               namespaceDescriptor_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry);
16293               if (subBuilder != null) {
16294                 subBuilder.mergeFrom(namespaceDescriptor_);
16295                 namespaceDescriptor_ = subBuilder.buildPartial();
16296               }
16297               bitField0_ |= 0x00000001;
16298               break;
16299             }
16300           }
16301         }
16302       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16303         throw e.setUnfinishedMessage(this);
16304       } catch (java.io.IOException e) {
16305         throw new com.google.protobuf.InvalidProtocolBufferException(
16306             e.getMessage()).setUnfinishedMessage(this);
16307       } finally {
16308         this.unknownFields = unknownFields.build();
16309         makeExtensionsImmutable();
16310       }
16311     }
    // Descriptor for the CreateNamespaceRequest message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_descriptor;
    }

    // Reflection table mapping field descriptors to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.Builder.class);
    }
16323 
    // Shared parser that delegates to the parsing constructor.
    // NOTE(review): field is public static but not final — generated this way
    // by protoc 2.5; callers should treat it as read-only.
    public static com.google.protobuf.Parser<CreateNamespaceRequest> PARSER =
        new com.google.protobuf.AbstractParser<CreateNamespaceRequest>() {
      public CreateNamespaceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CreateNamespaceRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CreateNamespaceRequest> getParserForType() {
      return PARSER;
    }
16338 
    // Presence bitmask: bit 0 tracks namespaceDescriptor.
    private int bitField0_;
    // required .NamespaceDescriptor namespaceDescriptor = 1;
    public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_;
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public boolean hasNamespaceDescriptor() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() {
      return namespaceDescriptor_;
    }
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() {
      return namespaceDescriptor_;
    }
16361 
    // Default: namespaceDescriptor points at its type's default instance.
    private void initFields() {
      namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The required namespaceDescriptor must be present and itself valid.
      if (!hasNamespaceDescriptor()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getNamespaceDescriptor().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
16381 
    // Serializes field 1 (when present) followed by any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures size is memoized before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, namespaceDescriptor_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, namespaceDescriptor_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
16405 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
16412 
    // Equality: same type, matching presence and value of the required
    // namespaceDescriptor, and equal unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest) obj;

      boolean result = true;
      result = result && (hasNamespaceDescriptor() == other.hasNamespaceDescriptor());
      if (hasNamespaceDescriptor()) {
        result = result && getNamespaceDescriptor()
            .equals(other.getNamespaceDescriptor());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
16433 
    // Memoized hash; 0 doubles as the "not computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNamespaceDescriptor()) {
        hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceDescriptor().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
16450 
    // Static parse entry points; all delegate to PARSER for each supported
    // input form (ByteString, byte[], InputStream, CodedInputStream), with
    // and without an extension registry.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
16503 
    // Builder factories: fresh builder, builder seeded from a prototype,
    // and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
16517     /**
16518      * Protobuf type {@code CreateNamespaceRequest}
16519      */
16520     public static final class Builder extends
16521         com.google.protobuf.GeneratedMessage.Builder<Builder>
16522        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequestOrBuilder {
      // Descriptor for the CreateNamespaceRequest message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_descriptor;
      }

      // Reflection table mapping field descriptors to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.Builder.class);
      }
16534 
16535       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested-message field builder when the runtime
      // requires field builders (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getNamespaceDescriptorFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
16553 
clear()16554       public Builder clear() {
16555         super.clear();
16556         if (namespaceDescriptorBuilder_ == null) {
16557           namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
16558         } else {
16559           namespaceDescriptorBuilder_.clear();
16560         }
16561         bitField0_ = (bitField0_ & ~0x00000001);
16562         return this;
16563       }
16564 
clone()16565       public Builder clone() {
16566         return create().mergeFrom(buildPartial());
16567       }
16568 
16569       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()16570           getDescriptorForType() {
16571         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceRequest_descriptor;
16572       }
16573 
getDefaultInstanceForType()16574       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest getDefaultInstanceForType() {
16575         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
16576       }
16577 
build()16578       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest build() {
16579         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest result = buildPartial();
16580         if (!result.isInitialized()) {
16581           throw newUninitializedMessageException(result);
16582         }
16583         return result;
16584       }
16585 
buildPartial()16586       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest buildPartial() {
16587         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest(this);
16588         int from_bitField0_ = bitField0_;
16589         int to_bitField0_ = 0;
16590         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
16591           to_bitField0_ |= 0x00000001;
16592         }
16593         if (namespaceDescriptorBuilder_ == null) {
16594           result.namespaceDescriptor_ = namespaceDescriptor_;
16595         } else {
16596           result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build();
16597         }
16598         result.bitField0_ = to_bitField0_;
16599         onBuilt();
16600         return result;
16601       }
16602 
mergeFrom(com.google.protobuf.Message other)16603       public Builder mergeFrom(com.google.protobuf.Message other) {
16604         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest) {
16605           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)other);
16606         } else {
16607           super.mergeFrom(other);
16608           return this;
16609         }
16610       }
16611 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest other)16612       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest other) {
16613         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance()) return this;
16614         if (other.hasNamespaceDescriptor()) {
16615           mergeNamespaceDescriptor(other.getNamespaceDescriptor());
16616         }
16617         this.mergeUnknownFields(other.getUnknownFields());
16618         return this;
16619       }
16620 
isInitialized()16621       public final boolean isInitialized() {
16622         if (!hasNamespaceDescriptor()) {
16623 
16624           return false;
16625         }
16626         if (!getNamespaceDescriptor().isInitialized()) {
16627 
16628           return false;
16629         }
16630         return true;
16631       }
16632 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16633       public Builder mergeFrom(
16634           com.google.protobuf.CodedInputStream input,
16635           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16636           throws java.io.IOException {
16637         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest parsedMessage = null;
16638         try {
16639           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
16640         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16641           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest) e.getUnfinishedMessage();
16642           throw e;
16643         } finally {
16644           if (parsedMessage != null) {
16645             mergeFrom(parsedMessage);
16646           }
16647         }
16648         return this;
16649       }
16650       private int bitField0_;
16651 
16652       // required .NamespaceDescriptor namespaceDescriptor = 1;
16653       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
16654       private com.google.protobuf.SingleFieldBuilder<
16655           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_;
16656       /**
16657        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16658        */
hasNamespaceDescriptor()16659       public boolean hasNamespaceDescriptor() {
16660         return ((bitField0_ & 0x00000001) == 0x00000001);
16661       }
16662       /**
16663        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16664        */
getNamespaceDescriptor()16665       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() {
16666         if (namespaceDescriptorBuilder_ == null) {
16667           return namespaceDescriptor_;
16668         } else {
16669           return namespaceDescriptorBuilder_.getMessage();
16670         }
16671       }
16672       /**
16673        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16674        */
setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value)16675       public Builder setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
16676         if (namespaceDescriptorBuilder_ == null) {
16677           if (value == null) {
16678             throw new NullPointerException();
16679           }
16680           namespaceDescriptor_ = value;
16681           onChanged();
16682         } else {
16683           namespaceDescriptorBuilder_.setMessage(value);
16684         }
16685         bitField0_ |= 0x00000001;
16686         return this;
16687       }
16688       /**
16689        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16690        */
setNamespaceDescriptor( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue)16691       public Builder setNamespaceDescriptor(
16692           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) {
16693         if (namespaceDescriptorBuilder_ == null) {
16694           namespaceDescriptor_ = builderForValue.build();
16695           onChanged();
16696         } else {
16697           namespaceDescriptorBuilder_.setMessage(builderForValue.build());
16698         }
16699         bitField0_ |= 0x00000001;
16700         return this;
16701       }
16702       /**
16703        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16704        */
mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value)16705       public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
16706         if (namespaceDescriptorBuilder_ == null) {
16707           if (((bitField0_ & 0x00000001) == 0x00000001) &&
16708               namespaceDescriptor_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) {
16709             namespaceDescriptor_ =
16710               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial();
16711           } else {
16712             namespaceDescriptor_ = value;
16713           }
16714           onChanged();
16715         } else {
16716           namespaceDescriptorBuilder_.mergeFrom(value);
16717         }
16718         bitField0_ |= 0x00000001;
16719         return this;
16720       }
16721       /**
16722        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16723        */
clearNamespaceDescriptor()16724       public Builder clearNamespaceDescriptor() {
16725         if (namespaceDescriptorBuilder_ == null) {
16726           namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
16727           onChanged();
16728         } else {
16729           namespaceDescriptorBuilder_.clear();
16730         }
16731         bitField0_ = (bitField0_ & ~0x00000001);
16732         return this;
16733       }
16734       /**
16735        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16736        */
getNamespaceDescriptorBuilder()16737       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder() {
16738         bitField0_ |= 0x00000001;
16739         onChanged();
16740         return getNamespaceDescriptorFieldBuilder().getBuilder();
16741       }
16742       /**
16743        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16744        */
getNamespaceDescriptorOrBuilder()16745       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() {
16746         if (namespaceDescriptorBuilder_ != null) {
16747           return namespaceDescriptorBuilder_.getMessageOrBuilder();
16748         } else {
16749           return namespaceDescriptor_;
16750         }
16751       }
16752       /**
16753        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
16754        */
16755       private com.google.protobuf.SingleFieldBuilder<
16756           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
getNamespaceDescriptorFieldBuilder()16757           getNamespaceDescriptorFieldBuilder() {
16758         if (namespaceDescriptorBuilder_ == null) {
16759           namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
16760               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>(
16761                   namespaceDescriptor_,
16762                   getParentForChildren(),
16763                   isClean());
16764           namespaceDescriptor_ = null;
16765         }
16766         return namespaceDescriptorBuilder_;
16767       }
16768 
16769       // @@protoc_insertion_point(builder_scope:CreateNamespaceRequest)
16770     }
16771 
    // Eagerly create the singleton default instance (noInit=true skips the
    // parsing constructor) and initialize its fields once at class load.
    static {
      defaultInstance = new CreateNamespaceRequest(true);
      defaultInstance.initFields();
    }
16776 
16777     // @@protoc_insertion_point(class_scope:CreateNamespaceRequest)
16778   }
16779 
  /**
   * Accessor interface for {@code CreateNamespaceResponse}.
   * The message declares no fields, so the interface adds nothing beyond
   * {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface CreateNamespaceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
16783   /**
16784    * Protobuf type {@code CreateNamespaceResponse}
16785    */
16786   public static final class CreateNamespaceResponse extends
16787       com.google.protobuf.GeneratedMessage
16788       implements CreateNamespaceResponseOrBuilder {
16789     // Use CreateNamespaceResponse.newBuilder() to construct.
CreateNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)16790     private CreateNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
16791       super(builder);
16792       this.unknownFields = builder.getUnknownFields();
16793     }
CreateNamespaceResponse(boolean noInit)16794     private CreateNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16795 
16796     private static final CreateNamespaceResponse defaultInstance;
getDefaultInstance()16797     public static CreateNamespaceResponse getDefaultInstance() {
16798       return defaultInstance;
16799     }
16800 
getDefaultInstanceForType()16801     public CreateNamespaceResponse getDefaultInstanceForType() {
16802       return defaultInstance;
16803     }
16804 
16805     private final com.google.protobuf.UnknownFieldSet unknownFields;
16806     @java.lang.Override
16807     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()16808         getUnknownFields() {
16809       return this.unknownFields;
16810     }
CreateNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16811     private CreateNamespaceResponse(
16812         com.google.protobuf.CodedInputStream input,
16813         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16814         throws com.google.protobuf.InvalidProtocolBufferException {
16815       initFields();
16816       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
16817           com.google.protobuf.UnknownFieldSet.newBuilder();
16818       try {
16819         boolean done = false;
16820         while (!done) {
16821           int tag = input.readTag();
16822           switch (tag) {
16823             case 0:
16824               done = true;
16825               break;
16826             default: {
16827               if (!parseUnknownField(input, unknownFields,
16828                                      extensionRegistry, tag)) {
16829                 done = true;
16830               }
16831               break;
16832             }
16833           }
16834         }
16835       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16836         throw e.setUnfinishedMessage(this);
16837       } catch (java.io.IOException e) {
16838         throw new com.google.protobuf.InvalidProtocolBufferException(
16839             e.getMessage()).setUnfinishedMessage(this);
16840       } finally {
16841         this.unknownFields = unknownFields.build();
16842         makeExtensionsImmutable();
16843       }
16844     }
16845     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()16846         getDescriptor() {
16847       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_descriptor;
16848     }
16849 
16850     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()16851         internalGetFieldAccessorTable() {
16852       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_fieldAccessorTable
16853           .ensureFieldAccessorsInitialized(
16854               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.Builder.class);
16855     }
16856 
16857     public static com.google.protobuf.Parser<CreateNamespaceResponse> PARSER =
16858         new com.google.protobuf.AbstractParser<CreateNamespaceResponse>() {
16859       public CreateNamespaceResponse parsePartialFrom(
16860           com.google.protobuf.CodedInputStream input,
16861           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16862           throws com.google.protobuf.InvalidProtocolBufferException {
16863         return new CreateNamespaceResponse(input, extensionRegistry);
16864       }
16865     };
16866 
16867     @java.lang.Override
getParserForType()16868     public com.google.protobuf.Parser<CreateNamespaceResponse> getParserForType() {
16869       return PARSER;
16870     }
16871 
initFields()16872     private void initFields() {
16873     }
16874     private byte memoizedIsInitialized = -1;
isInitialized()16875     public final boolean isInitialized() {
16876       byte isInitialized = memoizedIsInitialized;
16877       if (isInitialized != -1) return isInitialized == 1;
16878 
16879       memoizedIsInitialized = 1;
16880       return true;
16881     }
16882 
writeTo(com.google.protobuf.CodedOutputStream output)16883     public void writeTo(com.google.protobuf.CodedOutputStream output)
16884                         throws java.io.IOException {
16885       getSerializedSize();
16886       getUnknownFields().writeTo(output);
16887     }
16888 
16889     private int memoizedSerializedSize = -1;
getSerializedSize()16890     public int getSerializedSize() {
16891       int size = memoizedSerializedSize;
16892       if (size != -1) return size;
16893 
16894       size = 0;
16895       size += getUnknownFields().getSerializedSize();
16896       memoizedSerializedSize = size;
16897       return size;
16898     }
16899 
16900     private static final long serialVersionUID = 0L;
16901     @java.lang.Override
writeReplace()16902     protected java.lang.Object writeReplace()
16903         throws java.io.ObjectStreamException {
16904       return super.writeReplace();
16905     }
16906 
16907     @java.lang.Override
equals(final java.lang.Object obj)16908     public boolean equals(final java.lang.Object obj) {
16909       if (obj == this) {
16910        return true;
16911       }
16912       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse)) {
16913         return super.equals(obj);
16914       }
16915       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) obj;
16916 
16917       boolean result = true;
16918       result = result &&
16919           getUnknownFields().equals(other.getUnknownFields());
16920       return result;
16921     }
16922 
16923     private int memoizedHashCode = 0;
16924     @java.lang.Override
hashCode()16925     public int hashCode() {
16926       if (memoizedHashCode != 0) {
16927         return memoizedHashCode;
16928       }
16929       int hash = 41;
16930       hash = (19 * hash) + getDescriptorForType().hashCode();
16931       hash = (29 * hash) + getUnknownFields().hashCode();
16932       memoizedHashCode = hash;
16933       return hash;
16934     }
16935 
parseFrom( com.google.protobuf.ByteString data)16936     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(
16937         com.google.protobuf.ByteString data)
16938         throws com.google.protobuf.InvalidProtocolBufferException {
16939       return PARSER.parseFrom(data);
16940     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16941     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(
16942         com.google.protobuf.ByteString data,
16943         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16944         throws com.google.protobuf.InvalidProtocolBufferException {
16945       return PARSER.parseFrom(data, extensionRegistry);
16946     }
parseFrom(byte[] data)16947     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(byte[] data)
16948         throws com.google.protobuf.InvalidProtocolBufferException {
16949       return PARSER.parseFrom(data);
16950     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16951     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(
16952         byte[] data,
16953         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16954         throws com.google.protobuf.InvalidProtocolBufferException {
16955       return PARSER.parseFrom(data, extensionRegistry);
16956     }
parseFrom(java.io.InputStream input)16957     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(java.io.InputStream input)
16958         throws java.io.IOException {
16959       return PARSER.parseFrom(input);
16960     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16961     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(
16962         java.io.InputStream input,
16963         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16964         throws java.io.IOException {
16965       return PARSER.parseFrom(input, extensionRegistry);
16966     }
parseDelimitedFrom(java.io.InputStream input)16967     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseDelimitedFrom(java.io.InputStream input)
16968         throws java.io.IOException {
16969       return PARSER.parseDelimitedFrom(input);
16970     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16971     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseDelimitedFrom(
16972         java.io.InputStream input,
16973         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16974         throws java.io.IOException {
16975       return PARSER.parseDelimitedFrom(input, extensionRegistry);
16976     }
parseFrom( com.google.protobuf.CodedInputStream input)16977     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(
16978         com.google.protobuf.CodedInputStream input)
16979         throws java.io.IOException {
16980       return PARSER.parseFrom(input);
16981     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16982     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parseFrom(
16983         com.google.protobuf.CodedInputStream input,
16984         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16985         throws java.io.IOException {
16986       return PARSER.parseFrom(input, extensionRegistry);
16987     }
16988 
newBuilder()16989     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()16990     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse prototype)16991     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse prototype) {
16992       return newBuilder().mergeFrom(prototype);
16993     }
toBuilder()16994     public Builder toBuilder() { return newBuilder(this); }
16995 
16996     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)16997     protected Builder newBuilderForType(
16998         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16999       Builder builder = new Builder(parent);
17000       return builder;
17001     }
17002     /**
17003      * Protobuf type {@code CreateNamespaceResponse}
17004      */
17005     public static final class Builder extends
17006         com.google.protobuf.GeneratedMessage.Builder<Builder>
17007        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponseOrBuilder {
17008       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()17009           getDescriptor() {
17010         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_descriptor;
17011       }
17012 
17013       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()17014           internalGetFieldAccessorTable() {
17015         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_fieldAccessorTable
17016             .ensureFieldAccessorsInitialized(
17017                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.Builder.class);
17018       }
17019 
17020       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.newBuilder()
Builder()17021       private Builder() {
17022         maybeForceBuilderInitialization();
17023       }
17024 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)17025       private Builder(
17026           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17027         super(parent);
17028         maybeForceBuilderInitialization();
17029       }
maybeForceBuilderInitialization()17030       private void maybeForceBuilderInitialization() {
17031         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
17032         }
17033       }
create()17034       private static Builder create() {
17035         return new Builder();
17036       }
17037 
clear()17038       public Builder clear() {
17039         super.clear();
17040         return this;
17041       }
17042 
clone()17043       public Builder clone() {
17044         return create().mergeFrom(buildPartial());
17045       }
17046 
17047       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()17048           getDescriptorForType() {
17049         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_CreateNamespaceResponse_descriptor;
17050       }
17051 
getDefaultInstanceForType()17052       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse getDefaultInstanceForType() {
17053         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
17054       }
17055 
build()17056       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse build() {
17057         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse result = buildPartial();
17058         if (!result.isInitialized()) {
17059           throw newUninitializedMessageException(result);
17060         }
17061         return result;
17062       }
17063 
buildPartial()17064       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse buildPartial() {
17065         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse(this);
17066         onBuilt();
17067         return result;
17068       }
17069 
mergeFrom(com.google.protobuf.Message other)17070       public Builder mergeFrom(com.google.protobuf.Message other) {
17071         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) {
17072           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse)other);
17073         } else {
17074           super.mergeFrom(other);
17075           return this;
17076         }
17077       }
17078 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse other)17079       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse other) {
17080         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance()) return this;
17081         this.mergeUnknownFields(other.getUnknownFields());
17082         return this;
17083       }
17084 
isInitialized()17085       public final boolean isInitialized() {
17086         return true;
17087       }
17088 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)17089       public Builder mergeFrom(
17090           com.google.protobuf.CodedInputStream input,
17091           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17092           throws java.io.IOException {
17093         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse parsedMessage = null;
17094         try {
17095           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17096         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17097           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) e.getUnfinishedMessage();
17098           throw e;
17099         } finally {
17100           if (parsedMessage != null) {
17101             mergeFrom(parsedMessage);
17102           }
17103         }
17104         return this;
17105       }
17106 
17107       // @@protoc_insertion_point(builder_scope:CreateNamespaceResponse)
17108     }
17109 
17110     static {
17111       defaultInstance = new CreateNamespaceResponse(true);
defaultInstance.initFields()17112       defaultInstance.initFields();
17113     }
17114 
17115     // @@protoc_insertion_point(class_scope:CreateNamespaceResponse)
17116   }
17117 
  /**
   * Accessor interface for {@code DeleteNamespaceRequest}: read-only views of
   * the single required {@code string namespaceName = 1} field.
   */
  public interface DeleteNamespaceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string namespaceName = 1;
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * @return whether the field has been explicitly set
     */
    boolean hasNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     */
    java.lang.String getNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * <p>Raw bytes form of the field, avoiding a UTF-8 decode.
     */
    com.google.protobuf.ByteString
        getNamespaceNameBytes();
  }
  /**
   * Protobuf type {@code DeleteNamespaceRequest}
   *
   * Generated message carrying the single required {@code namespaceName}
   * string field (field number 1). Immutable once built; use
   * {@link #newBuilder()} to construct instances.
   */
  public static final class DeleteNamespaceRequest extends
      com.google.protobuf.GeneratedMessage
      implements DeleteNamespaceRequestOrBuilder {
    // Use DeleteNamespaceRequest.newBuilder() to construct.
    private DeleteNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer below to create the singleton
    // default instance without going through the builder.
    private DeleteNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in the static initializer at the
    // bottom of this class.
    private static final DeleteNamespaceRequest defaultInstance;
    public static DeleteNamespaceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DeleteNamespaceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parsing constructor: decodes one message from {@code input}.
     * Unrecognized fields are preserved in {@code unknownFields} rather than
     * dropped, so re-serialization round-trips data from newer schemas.
     */
    private DeleteNamespaceRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Java switch dispatch is by value, so the generator's placement of
          // `default` before `case 10` has no effect on matching.
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / end of group.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 10 = (field 1 << 3) | wire type 2 (length-delimited):
            // the namespaceName field. Stored as raw bytes; UTF-8 decoding is
            // deferred until getNamespaceName() is called.
            case 10: {
              bitField0_ |= 0x00000001;
              namespaceName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error, so the partial
        // message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.Builder.class);
    }

    // Parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<DeleteNamespaceRequest> PARSER =
        new com.google.protobuf.AbstractParser<DeleteNamespaceRequest>() {
      public DeleteNamespaceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteNamespaceRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DeleteNamespaceRequest> getParserForType() {
      return PARSER;
    }

    // Presence bits: bit 0 tracks namespaceName.
    private int bitField0_;
    // required string namespaceName = 1;
    public static final int NAMESPACENAME_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; lazily converted between the two
    // by the accessors below.
    private java.lang.Object namespaceName_;
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public boolean hasNamespaceName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public java.lang.String getNamespaceName() {
      java.lang.Object ref = namespaceName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes were valid UTF-8, so
        // malformed input keeps its original bytes for re-serialization.
        if (bs.isValidUtf8()) {
          namespaceName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public com.google.protobuf.ByteString
        getNamespaceNameBytes() {
      java.lang.Object ref = namespaceName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form so repeated serialization is cheap.
        namespaceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      namespaceName_ = "";
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // namespaceName is `required` in the .proto, so absence means invalid.
      if (!hasNamespaceName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of populating memoizedSerializedSize.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNamespaceNameBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNamespaceNameBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest) obj;

      // Field-by-field comparison: presence bit, value, then unknown fields.
      boolean result = true;
      result = result && (hasNamespaceName() == other.hasNamespaceName());
      if (hasNamespaceName()) {
        result = result && getNamespaceName()
            .equals(other.getNamespaceName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNamespaceName()) {
        hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DeleteNamespaceRequest}
     *
     * Mutable builder for {@code DeleteNamespaceRequest}; not thread-safe.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields whose nested
      // builders would need eager initialization.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        namespaceName_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
      }

      // Like buildPartial() but throws if the required field is missing.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without validating
      // required-field presence.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.namespaceName_ = namespaceName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance()) return this;
        if (other.hasNamespaceName()) {
          bitField0_ |= 0x00000001;
          namespaceName_ = other.namespaceName_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasNamespaceName()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string namespaceName = 1;
      // String or ByteString, mirroring the message's lazy dual representation.
      private java.lang.Object namespaceName_ = "";
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public boolean hasNamespaceName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public java.lang.String getNamespaceName() {
        java.lang.Object ref = namespaceName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          namespaceName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNamespaceNameBytes() {
        java.lang.Object ref = namespaceName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          namespaceName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public Builder setNamespaceName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        namespaceName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public Builder clearNamespaceName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        namespaceName_ = getDefaultInstance().getNamespaceName();
        onChanged();
        return this;
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public Builder setNamespaceNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        namespaceName_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DeleteNamespaceRequest)
    }

    static {
      defaultInstance = new DeleteNamespaceRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:DeleteNamespaceRequest)
  }
17634 
  /**
   * Accessor interface for the empty {@code DeleteNamespaceResponse} message;
   * declares no fields of its own beyond the base MessageOrBuilder contract.
   */
  public interface DeleteNamespaceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
17638   /**
17639    * Protobuf type {@code DeleteNamespaceResponse}
17640    */
17641   public static final class DeleteNamespaceResponse extends
17642       com.google.protobuf.GeneratedMessage
17643       implements DeleteNamespaceResponseOrBuilder {
17644     // Use DeleteNamespaceResponse.newBuilder() to construct.
DeleteNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)17645     private DeleteNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
17646       super(builder);
17647       this.unknownFields = builder.getUnknownFields();
17648     }
DeleteNamespaceResponse(boolean noInit)17649     private DeleteNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17650 
17651     private static final DeleteNamespaceResponse defaultInstance;
getDefaultInstance()17652     public static DeleteNamespaceResponse getDefaultInstance() {
17653       return defaultInstance;
17654     }
17655 
getDefaultInstanceForType()17656     public DeleteNamespaceResponse getDefaultInstanceForType() {
17657       return defaultInstance;
17658     }
17659 
17660     private final com.google.protobuf.UnknownFieldSet unknownFields;
17661     @java.lang.Override
17662     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()17663         getUnknownFields() {
17664       return this.unknownFields;
17665     }
DeleteNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)17666     private DeleteNamespaceResponse(
17667         com.google.protobuf.CodedInputStream input,
17668         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17669         throws com.google.protobuf.InvalidProtocolBufferException {
17670       initFields();
17671       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
17672           com.google.protobuf.UnknownFieldSet.newBuilder();
17673       try {
17674         boolean done = false;
17675         while (!done) {
17676           int tag = input.readTag();
17677           switch (tag) {
17678             case 0:
17679               done = true;
17680               break;
17681             default: {
17682               if (!parseUnknownField(input, unknownFields,
17683                                      extensionRegistry, tag)) {
17684                 done = true;
17685               }
17686               break;
17687             }
17688           }
17689         }
17690       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17691         throw e.setUnfinishedMessage(this);
17692       } catch (java.io.IOException e) {
17693         throw new com.google.protobuf.InvalidProtocolBufferException(
17694             e.getMessage()).setUnfinishedMessage(this);
17695       } finally {
17696         this.unknownFields = unknownFields.build();
17697         makeExtensionsImmutable();
17698       }
17699     }
17700     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()17701         getDescriptor() {
17702       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_descriptor;
17703     }
17704 
17705     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()17706         internalGetFieldAccessorTable() {
17707       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_fieldAccessorTable
17708           .ensureFieldAccessorsInitialized(
17709               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.Builder.class);
17710     }
17711 
17712     public static com.google.protobuf.Parser<DeleteNamespaceResponse> PARSER =
17713         new com.google.protobuf.AbstractParser<DeleteNamespaceResponse>() {
17714       public DeleteNamespaceResponse parsePartialFrom(
17715           com.google.protobuf.CodedInputStream input,
17716           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17717           throws com.google.protobuf.InvalidProtocolBufferException {
17718         return new DeleteNamespaceResponse(input, extensionRegistry);
17719       }
17720     };
17721 
17722     @java.lang.Override
getParserForType()17723     public com.google.protobuf.Parser<DeleteNamespaceResponse> getParserForType() {
17724       return PARSER;
17725     }
17726 
initFields()17727     private void initFields() {
17728     }
17729     private byte memoizedIsInitialized = -1;
isInitialized()17730     public final boolean isInitialized() {
17731       byte isInitialized = memoizedIsInitialized;
17732       if (isInitialized != -1) return isInitialized == 1;
17733 
17734       memoizedIsInitialized = 1;
17735       return true;
17736     }
17737 
writeTo(com.google.protobuf.CodedOutputStream output)17738     public void writeTo(com.google.protobuf.CodedOutputStream output)
17739                         throws java.io.IOException {
17740       getSerializedSize();
17741       getUnknownFields().writeTo(output);
17742     }
17743 
17744     private int memoizedSerializedSize = -1;
getSerializedSize()17745     public int getSerializedSize() {
17746       int size = memoizedSerializedSize;
17747       if (size != -1) return size;
17748 
17749       size = 0;
17750       size += getUnknownFields().getSerializedSize();
17751       memoizedSerializedSize = size;
17752       return size;
17753     }
17754 
17755     private static final long serialVersionUID = 0L;
17756     @java.lang.Override
writeReplace()17757     protected java.lang.Object writeReplace()
17758         throws java.io.ObjectStreamException {
17759       return super.writeReplace();
17760     }
17761 
17762     @java.lang.Override
equals(final java.lang.Object obj)17763     public boolean equals(final java.lang.Object obj) {
17764       if (obj == this) {
17765        return true;
17766       }
17767       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse)) {
17768         return super.equals(obj);
17769       }
17770       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) obj;
17771 
17772       boolean result = true;
17773       result = result &&
17774           getUnknownFields().equals(other.getUnknownFields());
17775       return result;
17776     }
17777 
17778     private int memoizedHashCode = 0;
17779     @java.lang.Override
hashCode()17780     public int hashCode() {
17781       if (memoizedHashCode != 0) {
17782         return memoizedHashCode;
17783       }
17784       int hash = 41;
17785       hash = (19 * hash) + getDescriptorForType().hashCode();
17786       hash = (29 * hash) + getUnknownFields().hashCode();
17787       memoizedHashCode = hash;
17788       return hash;
17789     }
17790 
    // ------------------------------------------------------------------
    // Static parse entry points for DeleteNamespaceResponse.
    // Every overload delegates to the PARSER singleton.
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // "Delimited" variants read a length prefix before the message body,
    // allowing several messages to share one stream (see protobuf docs).
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder pre-populated with the prototype's state.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally for nested-builder plumbing.
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DeleteNamespaceResponse}
     *
     * <p>Builder for {@code DeleteNamespaceResponse}. The message declares no
     * fields of its own (clear/merge below touch nothing but unknown fields),
     * so this builder mostly defers to the generated base class.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_descriptor;
      }

      // Maps descriptor fields to the generated accessor methods.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message fields, so there are no nested field builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      // Clone by building a partial message and merging it into a new builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteNamespaceResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if it is not fully initialized.
       * (With no required fields, isInitialized() here is always true.)
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse(this);
        onBuilt();
        return result;
      }

      // Generic merge: dispatch to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Only unknown fields are merged; the message has no declared fields.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // No required fields, so a builder is always initialized.
      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream via PARSER; on failure, the partially parsed
       * message attached to the exception is still merged (see finally block)
       * before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DeleteNamespaceResponse)
    }
17964 
    // Eagerly create the shared singleton default instance when the class
    // loads; the boolean-arg constructor skips normal builder-based setup.
    static {
      defaultInstance = new DeleteNamespaceResponse(true);
      defaultInstance.initFields();
    }
17969 
17970     // @@protoc_insertion_point(class_scope:DeleteNamespaceResponse)
17971   }
17972 
  /**
   * Accessor interface for {@code ModifyNamespaceRequest}, implemented by
   * both the immutable message and its builder.
   */
  public interface ModifyNamespaceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .NamespaceDescriptor namespaceDescriptor = 1;
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    boolean hasNamespaceDescriptor();
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor();
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder();
  }
17990   /**
17991    * Protobuf type {@code ModifyNamespaceRequest}
17992    */
17993   public static final class ModifyNamespaceRequest extends
17994       com.google.protobuf.GeneratedMessage
17995       implements ModifyNamespaceRequestOrBuilder {
    // Use ModifyNamespaceRequest.newBuilder() to construct.
    // Builder-based constructor: copies the builder's unknown fields.
    private ModifyNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the shared default instance.
    private ModifyNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, assigned in a static initializer
    // elsewhere in this class (not shown in this excerpt).
    private static final ModifyNamespaceRequest defaultInstance;
    public static ModifyNamespaceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ModifyNamespaceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unrecognized fields captured at parse time so they survive a
    // parse/serialize round trip.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Stream-parsing constructor: reads tagged fields from {@code input}
     * until end of stream (tag 0), preserving unrecognized fields.
     * Invoked indirectly through {@code PARSER}.
     */
    private ModifyNamespaceRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of message / stream.
              done = true;
              break;
            default: {
              // Unknown tag: stash it; stop if it terminates a group.
              // (The default arm preceding case 10 does not affect dispatch.)
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (namespaceDescriptor), length-delimited wire type.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field appeared before: merge the new value into the old.
                subBuilder = namespaceDescriptor_.toBuilder();
              }
              namespaceDescriptor_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(namespaceDescriptor_);
                namespaceDescriptor_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;   // mark field 1 as present
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was parsed — even on error — so the partially
        // parsed message attached to the exception is self-consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessor methods.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.Builder.class);
    }

    // Parser singleton used by all the static parseFrom/parseDelimitedFrom
    // entry points. (Non-final by generator design; do not reassign.)
    public static com.google.protobuf.Parser<ModifyNamespaceRequest> PARSER =
        new com.google.protobuf.AbstractParser<ModifyNamespaceRequest>() {
      public ModifyNamespaceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ModifyNamespaceRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ModifyNamespaceRequest> getParserForType() {
      return PARSER;
    }
18092 
    // Presence bits for optional/required fields; bit 0x1 = namespaceDescriptor.
    private int bitField0_;
    // required .NamespaceDescriptor namespaceDescriptor = 1;
    public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_;
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     *
     * @return true if the field was explicitly set (presence bit 0x1).
     */
    public boolean hasNamespaceDescriptor() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() {
      return namespaceDescriptor_;
    }
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() {
      return namespaceDescriptor_;
    }

    // Resets the field to its default so getters never return null.
    private void initFields() {
      namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
    }
    // Cached init check: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * A ModifyNamespaceRequest is initialized only when its required
     * namespaceDescriptor is present and itself initialized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasNamespaceDescriptor()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getNamespaceDescriptor().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes the set field (if present) followed by unknown fields. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();   // ensure size is computed/cached before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, namespaceDescriptor_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, namespaceDescriptor_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
18159 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the generated base class.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Structural equality: same field presence, equal field value when
     * present, and equal unknown fields.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest) obj;

      boolean result = true;
      result = result && (hasNamespaceDescriptor() == other.hasNamespaceDescriptor());
      if (hasNamespaceDescriptor()) {
        result = result && getNamespaceDescriptor()
            .equals(other.getNamespaceDescriptor());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 doubles as the "not computed" sentinel, so a hash that
    // happens to equal 0 is simply recomputed on each call (benign).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNamespaceDescriptor()) {
        hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceDescriptor().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
18204 
    // ------------------------------------------------------------------
    // Static parse entry points for ModifyNamespaceRequest.
    // Every overload delegates to the PARSER singleton.
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // "Delimited" variants read a length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder pre-populated with the prototype's state.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally for nested-builder plumbing.
      Builder builder = new Builder(parent);
      return builder;
    }
18271     /**
18272      * Protobuf type {@code ModifyNamespaceRequest}
18273      */
18274     public static final class Builder extends
18275         com.google.protobuf.GeneratedMessage.Builder<Builder>
18276        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_descriptor;
      }

      // Maps descriptor fields to the generated accessor methods.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // When the runtime requests eager field builders, create the nested
      // builder for namespaceDescriptor up front.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getNamespaceDescriptorFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
18307 
      /** Resets the builder: clears the field value and its presence bit. */
      public Builder clear() {
        super.clear();
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
        } else {
          namespaceDescriptorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      // Clone by building a partial message and merging it into a new builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException if the
       * required namespaceDescriptor is missing or incomplete.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without the initialization check, copying presence bits over. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // Take the value from the nested builder when one exists, otherwise
        // the plain field.
        if (namespaceDescriptorBuilder_ == null) {
          result.namespaceDescriptor_ = namespaceDescriptor_;
        } else {
          result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
18356 
      // Generic merge: dispatch to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges the set field (if any) and unknown fields from another message.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance()) return this;
        if (other.hasNamespaceDescriptor()) {
          mergeNamespaceDescriptor(other.getNamespaceDescriptor());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Initialized only when the required namespaceDescriptor is present
      // and itself fully initialized.
      public final boolean isInitialized() {
        if (!hasNamespaceDescriptor()) {

          return false;
        }
        if (!getNamespaceDescriptor().isInitialized()) {

          return false;
        }
        return true;
      }

      /**
       * Parses from a stream via PARSER; on failure, the partially parsed
       * message attached to the exception is still merged (see finally block)
       * before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits; bit 0x1 = namespaceDescriptor.
      private int bitField0_;

      // required .NamespaceDescriptor namespaceDescriptor = 1;
      // Field state lives in exactly one of two places: the plain message
      // below, or — once a nested builder is created — the SingleFieldBuilder.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_;
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * @return true if the field was explicitly set (presence bit 0x1).
       */
      public boolean hasNamespaceDescriptor() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() {
        if (namespaceDescriptorBuilder_ == null) {
          return namespaceDescriptor_;
        } else {
          return namespaceDescriptorBuilder_.getMessage();
        }
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * Sets the field and marks it present; rejects null.
       */
      public Builder setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
        if (namespaceDescriptorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          namespaceDescriptor_ = value;
          onChanged();
        } else {
          namespaceDescriptorBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
18442       /**
18443        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
18444        */
setNamespaceDescriptor( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue)18445       public Builder setNamespaceDescriptor(
18446           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) {
18447         if (namespaceDescriptorBuilder_ == null) {
18448           namespaceDescriptor_ = builderForValue.build();
18449           onChanged();
18450         } else {
18451           namespaceDescriptorBuilder_.setMessage(builderForValue.build());
18452         }
18453         bitField0_ |= 0x00000001;
18454         return this;
18455       }
18456       /**
18457        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
18458        */
mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value)18459       public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
18460         if (namespaceDescriptorBuilder_ == null) {
18461           if (((bitField0_ & 0x00000001) == 0x00000001) &&
18462               namespaceDescriptor_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) {
18463             namespaceDescriptor_ =
18464               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial();
18465           } else {
18466             namespaceDescriptor_ = value;
18467           }
18468           onChanged();
18469         } else {
18470           namespaceDescriptorBuilder_.mergeFrom(value);
18471         }
18472         bitField0_ |= 0x00000001;
18473         return this;
18474       }
18475       /**
18476        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
18477        */
clearNamespaceDescriptor()18478       public Builder clearNamespaceDescriptor() {
18479         if (namespaceDescriptorBuilder_ == null) {
18480           namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
18481           onChanged();
18482         } else {
18483           namespaceDescriptorBuilder_.clear();
18484         }
18485         bitField0_ = (bitField0_ & ~0x00000001);
18486         return this;
18487       }
18488       /**
18489        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
18490        */
getNamespaceDescriptorBuilder()18491       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder() {
18492         bitField0_ |= 0x00000001;
18493         onChanged();
18494         return getNamespaceDescriptorFieldBuilder().getBuilder();
18495       }
18496       /**
18497        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
18498        */
getNamespaceDescriptorOrBuilder()18499       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() {
18500         if (namespaceDescriptorBuilder_ != null) {
18501           return namespaceDescriptorBuilder_.getMessageOrBuilder();
18502         } else {
18503           return namespaceDescriptor_;
18504         }
18505       }
18506       /**
18507        * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
18508        */
18509       private com.google.protobuf.SingleFieldBuilder<
18510           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
getNamespaceDescriptorFieldBuilder()18511           getNamespaceDescriptorFieldBuilder() {
18512         if (namespaceDescriptorBuilder_ == null) {
18513           namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
18514               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>(
18515                   namespaceDescriptor_,
18516                   getParentForChildren(),
18517                   isClean());
18518           namespaceDescriptor_ = null;
18519         }
18520         return namespaceDescriptorBuilder_;
18521       }
18522 
18523       // @@protoc_insertion_point(builder_scope:ModifyNamespaceRequest)
18524     }
18525 
    static {
      // Create and initialize the shared immutable default instance.
      defaultInstance = new ModifyNamespaceRequest(true);
      defaultInstance.initFields();
    }
18530 
18531     // @@protoc_insertion_point(class_scope:ModifyNamespaceRequest)
18532   }
18533 
  // Intentionally empty: ModifyNamespaceResponse declares no fields.
  public interface ModifyNamespaceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
18537   /**
18538    * Protobuf type {@code ModifyNamespaceResponse}
18539    */
18540   public static final class ModifyNamespaceResponse extends
18541       com.google.protobuf.GeneratedMessage
18542       implements ModifyNamespaceResponseOrBuilder {
18543     // Use ModifyNamespaceResponse.newBuilder() to construct.
ModifyNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)18544     private ModifyNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
18545       super(builder);
18546       this.unknownFields = builder.getUnknownFields();
18547     }
ModifyNamespaceResponse(boolean noInit)18548     private ModifyNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
18549 
18550     private static final ModifyNamespaceResponse defaultInstance;
getDefaultInstance()18551     public static ModifyNamespaceResponse getDefaultInstance() {
18552       return defaultInstance;
18553     }
18554 
getDefaultInstanceForType()18555     public ModifyNamespaceResponse getDefaultInstanceForType() {
18556       return defaultInstance;
18557     }
18558 
18559     private final com.google.protobuf.UnknownFieldSet unknownFields;
18560     @java.lang.Override
18561     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()18562         getUnknownFields() {
18563       return this.unknownFields;
18564     }
ModifyNamespaceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18565     private ModifyNamespaceResponse(
18566         com.google.protobuf.CodedInputStream input,
18567         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18568         throws com.google.protobuf.InvalidProtocolBufferException {
18569       initFields();
18570       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
18571           com.google.protobuf.UnknownFieldSet.newBuilder();
18572       try {
18573         boolean done = false;
18574         while (!done) {
18575           int tag = input.readTag();
18576           switch (tag) {
18577             case 0:
18578               done = true;
18579               break;
18580             default: {
18581               if (!parseUnknownField(input, unknownFields,
18582                                      extensionRegistry, tag)) {
18583                 done = true;
18584               }
18585               break;
18586             }
18587           }
18588         }
18589       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18590         throw e.setUnfinishedMessage(this);
18591       } catch (java.io.IOException e) {
18592         throw new com.google.protobuf.InvalidProtocolBufferException(
18593             e.getMessage()).setUnfinishedMessage(this);
18594       } finally {
18595         this.unknownFields = unknownFields.build();
18596         makeExtensionsImmutable();
18597       }
18598     }
18599     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()18600         getDescriptor() {
18601       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_descriptor;
18602     }
18603 
18604     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()18605         internalGetFieldAccessorTable() {
18606       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable
18607           .ensureFieldAccessorsInitialized(
18608               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.Builder.class);
18609     }
18610 
18611     public static com.google.protobuf.Parser<ModifyNamespaceResponse> PARSER =
18612         new com.google.protobuf.AbstractParser<ModifyNamespaceResponse>() {
18613       public ModifyNamespaceResponse parsePartialFrom(
18614           com.google.protobuf.CodedInputStream input,
18615           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18616           throws com.google.protobuf.InvalidProtocolBufferException {
18617         return new ModifyNamespaceResponse(input, extensionRegistry);
18618       }
18619     };
18620 
18621     @java.lang.Override
getParserForType()18622     public com.google.protobuf.Parser<ModifyNamespaceResponse> getParserForType() {
18623       return PARSER;
18624     }
18625 
initFields()18626     private void initFields() {
18627     }
18628     private byte memoizedIsInitialized = -1;
isInitialized()18629     public final boolean isInitialized() {
18630       byte isInitialized = memoizedIsInitialized;
18631       if (isInitialized != -1) return isInitialized == 1;
18632 
18633       memoizedIsInitialized = 1;
18634       return true;
18635     }
18636 
writeTo(com.google.protobuf.CodedOutputStream output)18637     public void writeTo(com.google.protobuf.CodedOutputStream output)
18638                         throws java.io.IOException {
18639       getSerializedSize();
18640       getUnknownFields().writeTo(output);
18641     }
18642 
18643     private int memoizedSerializedSize = -1;
getSerializedSize()18644     public int getSerializedSize() {
18645       int size = memoizedSerializedSize;
18646       if (size != -1) return size;
18647 
18648       size = 0;
18649       size += getUnknownFields().getSerializedSize();
18650       memoizedSerializedSize = size;
18651       return size;
18652     }
18653 
18654     private static final long serialVersionUID = 0L;
18655     @java.lang.Override
writeReplace()18656     protected java.lang.Object writeReplace()
18657         throws java.io.ObjectStreamException {
18658       return super.writeReplace();
18659     }
18660 
18661     @java.lang.Override
equals(final java.lang.Object obj)18662     public boolean equals(final java.lang.Object obj) {
18663       if (obj == this) {
18664        return true;
18665       }
18666       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse)) {
18667         return super.equals(obj);
18668       }
18669       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) obj;
18670 
18671       boolean result = true;
18672       result = result &&
18673           getUnknownFields().equals(other.getUnknownFields());
18674       return result;
18675     }
18676 
18677     private int memoizedHashCode = 0;
18678     @java.lang.Override
hashCode()18679     public int hashCode() {
18680       if (memoizedHashCode != 0) {
18681         return memoizedHashCode;
18682       }
18683       int hash = 41;
18684       hash = (19 * hash) + getDescriptorForType().hashCode();
18685       hash = (29 * hash) + getUnknownFields().hashCode();
18686       memoizedHashCode = hash;
18687       return hash;
18688     }
18689 
parseFrom( com.google.protobuf.ByteString data)18690     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(
18691         com.google.protobuf.ByteString data)
18692         throws com.google.protobuf.InvalidProtocolBufferException {
18693       return PARSER.parseFrom(data);
18694     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18695     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(
18696         com.google.protobuf.ByteString data,
18697         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18698         throws com.google.protobuf.InvalidProtocolBufferException {
18699       return PARSER.parseFrom(data, extensionRegistry);
18700     }
parseFrom(byte[] data)18701     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(byte[] data)
18702         throws com.google.protobuf.InvalidProtocolBufferException {
18703       return PARSER.parseFrom(data);
18704     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18705     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(
18706         byte[] data,
18707         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18708         throws com.google.protobuf.InvalidProtocolBufferException {
18709       return PARSER.parseFrom(data, extensionRegistry);
18710     }
parseFrom(java.io.InputStream input)18711     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(java.io.InputStream input)
18712         throws java.io.IOException {
18713       return PARSER.parseFrom(input);
18714     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18715     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(
18716         java.io.InputStream input,
18717         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18718         throws java.io.IOException {
18719       return PARSER.parseFrom(input, extensionRegistry);
18720     }
parseDelimitedFrom(java.io.InputStream input)18721     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseDelimitedFrom(java.io.InputStream input)
18722         throws java.io.IOException {
18723       return PARSER.parseDelimitedFrom(input);
18724     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18725     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseDelimitedFrom(
18726         java.io.InputStream input,
18727         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18728         throws java.io.IOException {
18729       return PARSER.parseDelimitedFrom(input, extensionRegistry);
18730     }
parseFrom( com.google.protobuf.CodedInputStream input)18731     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(
18732         com.google.protobuf.CodedInputStream input)
18733         throws java.io.IOException {
18734       return PARSER.parseFrom(input);
18735     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18736     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parseFrom(
18737         com.google.protobuf.CodedInputStream input,
18738         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18739         throws java.io.IOException {
18740       return PARSER.parseFrom(input, extensionRegistry);
18741     }
18742 
newBuilder()18743     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()18744     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse prototype)18745     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse prototype) {
18746       return newBuilder().mergeFrom(prototype);
18747     }
toBuilder()18748     public Builder toBuilder() { return newBuilder(this); }
18749 
18750     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)18751     protected Builder newBuilderForType(
18752         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18753       Builder builder = new Builder(parent);
18754       return builder;
18755     }
18756     /**
18757      * Protobuf type {@code ModifyNamespaceResponse}
18758      */
18759     public static final class Builder extends
18760         com.google.protobuf.GeneratedMessage.Builder<Builder>
18761        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponseOrBuilder {
18762       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()18763           getDescriptor() {
18764         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_descriptor;
18765       }
18766 
18767       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()18768           internalGetFieldAccessorTable() {
18769         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_fieldAccessorTable
18770             .ensureFieldAccessorsInitialized(
18771                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.Builder.class);
18772       }
18773 
18774       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.newBuilder()
Builder()18775       private Builder() {
18776         maybeForceBuilderInitialization();
18777       }
18778 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)18779       private Builder(
18780           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18781         super(parent);
18782         maybeForceBuilderInitialization();
18783       }
maybeForceBuilderInitialization()18784       private void maybeForceBuilderInitialization() {
18785         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18786         }
18787       }
create()18788       private static Builder create() {
18789         return new Builder();
18790       }
18791 
clear()18792       public Builder clear() {
18793         super.clear();
18794         return this;
18795       }
18796 
clone()18797       public Builder clone() {
18798         return create().mergeFrom(buildPartial());
18799       }
18800 
18801       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()18802           getDescriptorForType() {
18803         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ModifyNamespaceResponse_descriptor;
18804       }
18805 
getDefaultInstanceForType()18806       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse getDefaultInstanceForType() {
18807         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
18808       }
18809 
build()18810       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse build() {
18811         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse result = buildPartial();
18812         if (!result.isInitialized()) {
18813           throw newUninitializedMessageException(result);
18814         }
18815         return result;
18816       }
18817 
buildPartial()18818       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse buildPartial() {
18819         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse(this);
18820         onBuilt();
18821         return result;
18822       }
18823 
mergeFrom(com.google.protobuf.Message other)18824       public Builder mergeFrom(com.google.protobuf.Message other) {
18825         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) {
18826           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse)other);
18827         } else {
18828           super.mergeFrom(other);
18829           return this;
18830         }
18831       }
18832 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse other)18833       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse other) {
18834         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance()) return this;
18835         this.mergeUnknownFields(other.getUnknownFields());
18836         return this;
18837       }
18838 
isInitialized()18839       public final boolean isInitialized() {
18840         return true;
18841       }
18842 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18843       public Builder mergeFrom(
18844           com.google.protobuf.CodedInputStream input,
18845           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18846           throws java.io.IOException {
18847         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse parsedMessage = null;
18848         try {
18849           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18850         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18851           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) e.getUnfinishedMessage();
18852           throw e;
18853         } finally {
18854           if (parsedMessage != null) {
18855             mergeFrom(parsedMessage);
18856           }
18857         }
18858         return this;
18859       }
18860 
18861       // @@protoc_insertion_point(builder_scope:ModifyNamespaceResponse)
18862     }
18863 
18864     static {
18865       defaultInstance = new ModifyNamespaceResponse(true);
defaultInstance.initFields()18866       defaultInstance.initFields();
18867     }
18868 
18869     // @@protoc_insertion_point(class_scope:ModifyNamespaceResponse)
18870   }
18871 
  /**
   * Accessors for {@code GetNamespaceDescriptorRequest}, which declares a
   * single required string field {@code namespaceName = 1}.
   */
  public interface GetNamespaceDescriptorRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string namespaceName = 1;
    /**
     * <code>required string namespaceName = 1;</code>
     */
    boolean hasNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     */
    java.lang.String getNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     */
    com.google.protobuf.ByteString
        getNamespaceNameBytes();
  }
18890   /**
18891    * Protobuf type {@code GetNamespaceDescriptorRequest}
18892    */
18893   public static final class GetNamespaceDescriptorRequest extends
18894       com.google.protobuf.GeneratedMessage
18895       implements GetNamespaceDescriptorRequestOrBuilder {
    // Use GetNamespaceDescriptorRequest.newBuilder() to construct.
    // Builder-based constructor; copies the builder's unknown fields.
    private GetNamespaceDescriptorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
GetNamespaceDescriptorRequest(boolean noInit)18901     private GetNamespaceDescriptorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
18902 
    // Singleton default instance, assigned in the class's static initializer.
    private static final GetNamespaceDescriptorRequest defaultInstance;
    public static GetNamespaceDescriptorRequest getDefaultInstance() {
      return defaultInstance;
    }
18907 
getDefaultInstanceForType()18908     public GetNamespaceDescriptorRequest getDefaultInstanceForType() {
18909       return defaultInstance;
18910     }
18911 
    // Fields read off the wire that this message type does not declare.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetNamespaceDescriptorRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)18918     private GetNamespaceDescriptorRequest(
18919         com.google.protobuf.CodedInputStream input,
18920         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18921         throws com.google.protobuf.InvalidProtocolBufferException {
18922       initFields();
18923       int mutable_bitField0_ = 0;
18924       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
18925           com.google.protobuf.UnknownFieldSet.newBuilder();
18926       try {
18927         boolean done = false;
18928         while (!done) {
18929           int tag = input.readTag();
18930           switch (tag) {
18931             case 0:
18932               done = true;
18933               break;
18934             default: {
18935               if (!parseUnknownField(input, unknownFields,
18936                                      extensionRegistry, tag)) {
18937                 done = true;
18938               }
18939               break;
18940             }
18941             case 10: {
18942               bitField0_ |= 0x00000001;
18943               namespaceName_ = input.readBytes();
18944               break;
18945             }
18946           }
18947         }
18948       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18949         throw e.setUnfinishedMessage(this);
18950       } catch (java.io.IOException e) {
18951         throw new com.google.protobuf.InvalidProtocolBufferException(
18952             e.getMessage()).setUnfinishedMessage(this);
18953       } finally {
18954         this.unknownFields = unknownFields.build();
18955         makeExtensionsImmutable();
18956       }
18957     }
    // Static descriptor accessor; the descriptor itself lives at file scope.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_descriptor;
    }
18962 
    // Wires this class and its Builder to the reflective field-accessor table.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.Builder.class);
    }
18969 
    // NOTE(review): public mutable static (not final) — emitted this way by the
    // generator; never reassign it.
    public static com.google.protobuf.Parser<GetNamespaceDescriptorRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetNamespaceDescriptorRequest>() {
      public GetNamespaceDescriptorRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetNamespaceDescriptorRequest(input, extensionRegistry);
      }
    };
18979 
    @java.lang.Override
    public com.google.protobuf.Parser<GetNamespaceDescriptorRequest> getParserForType() {
      // Shared parser instance for this message type.
      return PARSER;
    }
18984 
    // Presence bits; bit 0 == namespaceName.
    private int bitField0_;
    // required string namespaceName = 1;
    public static final int NAMESPACENAME_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString; see getNamespaceName().
    private java.lang.Object namespaceName_;
18989     /**
18990      * <code>required string namespaceName = 1;</code>
18991      */
hasNamespaceName()18992     public boolean hasNamespaceName() {
18993       return ((bitField0_ & 0x00000001) == 0x00000001);
18994     }
18995     /**
18996      * <code>required string namespaceName = 1;</code>
18997      */
getNamespaceName()18998     public java.lang.String getNamespaceName() {
18999       java.lang.Object ref = namespaceName_;
19000       if (ref instanceof java.lang.String) {
19001         return (java.lang.String) ref;
19002       } else {
19003         com.google.protobuf.ByteString bs =
19004             (com.google.protobuf.ByteString) ref;
19005         java.lang.String s = bs.toStringUtf8();
19006         if (bs.isValidUtf8()) {
19007           namespaceName_ = s;
19008         }
19009         return s;
19010       }
19011     }
19012     /**
19013      * <code>required string namespaceName = 1;</code>
19014      */
19015     public com.google.protobuf.ByteString
getNamespaceNameBytes()19016         getNamespaceNameBytes() {
19017       java.lang.Object ref = namespaceName_;
19018       if (ref instanceof java.lang.String) {
19019         com.google.protobuf.ByteString b =
19020             com.google.protobuf.ByteString.copyFromUtf8(
19021                 (java.lang.String) ref);
19022         namespaceName_ = b;
19023         return b;
19024       } else {
19025         return (com.google.protobuf.ByteString) ref;
19026       }
19027     }
19028 
initFields()19029     private void initFields() {
19030       namespaceName_ = "";
19031     }
19032     private byte memoizedIsInitialized = -1;
isInitialized()19033     public final boolean isInitialized() {
19034       byte isInitialized = memoizedIsInitialized;
19035       if (isInitialized != -1) return isInitialized == 1;
19036 
19037       if (!hasNamespaceName()) {
19038         memoizedIsInitialized = 0;
19039         return false;
19040       }
19041       memoizedIsInitialized = 1;
19042       return true;
19043     }
19044 
writeTo(com.google.protobuf.CodedOutputStream output)19045     public void writeTo(com.google.protobuf.CodedOutputStream output)
19046                         throws java.io.IOException {
19047       getSerializedSize();
19048       if (((bitField0_ & 0x00000001) == 0x00000001)) {
19049         output.writeBytes(1, getNamespaceNameBytes());
19050       }
19051       getUnknownFields().writeTo(output);
19052     }
19053 
19054     private int memoizedSerializedSize = -1;
getSerializedSize()19055     public int getSerializedSize() {
19056       int size = memoizedSerializedSize;
19057       if (size != -1) return size;
19058 
19059       size = 0;
19060       if (((bitField0_ & 0x00000001) == 0x00000001)) {
19061         size += com.google.protobuf.CodedOutputStream
19062           .computeBytesSize(1, getNamespaceNameBytes());
19063       }
19064       size += getUnknownFields().getSerializedSize();
19065       memoizedSerializedSize = size;
19066       return size;
19067     }
19068 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook: delegates to GeneratedMessage so the object is
    // serialized via the protobuf wire format rather than default Java field
    // serialization.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
19075 
    // Value equality: same field presence, same namespaceName, same unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) obj;

      boolean result = true;
      result = result && (hasNamespaceName() == other.hasNamespaceName());
      if (hasNamespaceName()) {
        result = result && getNamespaceName()
            .equals(other.getNamespaceName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 is the "not computed" sentinel, so a message whose hash
    // happens to be 0 is recomputed on every call (benign).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNamespaceName()) {
        hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
19113 
parseFrom( com.google.protobuf.ByteString data)19114     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(
19115         com.google.protobuf.ByteString data)
19116         throws com.google.protobuf.InvalidProtocolBufferException {
19117       return PARSER.parseFrom(data);
19118     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19119     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(
19120         com.google.protobuf.ByteString data,
19121         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19122         throws com.google.protobuf.InvalidProtocolBufferException {
19123       return PARSER.parseFrom(data, extensionRegistry);
19124     }
parseFrom(byte[] data)19125     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(byte[] data)
19126         throws com.google.protobuf.InvalidProtocolBufferException {
19127       return PARSER.parseFrom(data);
19128     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19129     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(
19130         byte[] data,
19131         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19132         throws com.google.protobuf.InvalidProtocolBufferException {
19133       return PARSER.parseFrom(data, extensionRegistry);
19134     }
parseFrom(java.io.InputStream input)19135     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(java.io.InputStream input)
19136         throws java.io.IOException {
19137       return PARSER.parseFrom(input);
19138     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19139     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(
19140         java.io.InputStream input,
19141         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19142         throws java.io.IOException {
19143       return PARSER.parseFrom(input, extensionRegistry);
19144     }
parseDelimitedFrom(java.io.InputStream input)19145     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseDelimitedFrom(java.io.InputStream input)
19146         throws java.io.IOException {
19147       return PARSER.parseDelimitedFrom(input);
19148     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19149     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseDelimitedFrom(
19150         java.io.InputStream input,
19151         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19152         throws java.io.IOException {
19153       return PARSER.parseDelimitedFrom(input, extensionRegistry);
19154     }
parseFrom( com.google.protobuf.CodedInputStream input)19155     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(
19156         com.google.protobuf.CodedInputStream input)
19157         throws java.io.IOException {
19158       return PARSER.parseFrom(input);
19159     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19160     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parseFrom(
19161         com.google.protobuf.CodedInputStream input,
19162         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19163         throws java.io.IOException {
19164       return PARSER.parseFrom(input, extensionRegistry);
19165     }
19166 
newBuilder()19167     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()19168     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest prototype)19169     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest prototype) {
19170       return newBuilder().mergeFrom(prototype);
19171     }
toBuilder()19172     public Builder toBuilder() { return newBuilder(this); }
19173 
19174     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)19175     protected Builder newBuilderForType(
19176         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19177       Builder builder = new Builder(parent);
19178       return builder;
19179     }
19180     /**
19181      * Protobuf type {@code GetNamespaceDescriptorRequest}
19182      */
19183     public static final class Builder extends
19184         com.google.protobuf.GeneratedMessage.Builder<Builder>
19185        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequestOrBuilder {
19186       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()19187           getDescriptor() {
19188         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_descriptor;
19189       }
19190 
19191       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()19192           internalGetFieldAccessorTable() {
19193         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable
19194             .ensureFieldAccessorsInitialized(
19195                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.Builder.class);
19196       }
19197 
19198       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.newBuilder()
Builder()19199       private Builder() {
19200         maybeForceBuilderInitialization();
19201       }
19202 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)19203       private Builder(
19204           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19205         super(parent);
19206         maybeForceBuilderInitialization();
19207       }
maybeForceBuilderInitialization()19208       private void maybeForceBuilderInitialization() {
19209         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
19210         }
19211       }
create()19212       private static Builder create() {
19213         return new Builder();
19214       }
19215 
clear()19216       public Builder clear() {
19217         super.clear();
19218         namespaceName_ = "";
19219         bitField0_ = (bitField0_ & ~0x00000001);
19220         return this;
19221       }
19222 
clone()19223       public Builder clone() {
19224         return create().mergeFrom(buildPartial());
19225       }
19226 
19227       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()19228           getDescriptorForType() {
19229         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorRequest_descriptor;
19230       }
19231 
getDefaultInstanceForType()19232       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest getDefaultInstanceForType() {
19233         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
19234       }
19235 
build()19236       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest build() {
19237         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest result = buildPartial();
19238         if (!result.isInitialized()) {
19239           throw newUninitializedMessageException(result);
19240         }
19241         return result;
19242       }
19243 
buildPartial()19244       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest buildPartial() {
19245         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest(this);
19246         int from_bitField0_ = bitField0_;
19247         int to_bitField0_ = 0;
19248         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
19249           to_bitField0_ |= 0x00000001;
19250         }
19251         result.namespaceName_ = namespaceName_;
19252         result.bitField0_ = to_bitField0_;
19253         onBuilt();
19254         return result;
19255       }
19256 
mergeFrom(com.google.protobuf.Message other)19257       public Builder mergeFrom(com.google.protobuf.Message other) {
19258         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) {
19259           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)other);
19260         } else {
19261           super.mergeFrom(other);
19262           return this;
19263         }
19264       }
19265 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest other)19266       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest other) {
19267         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance()) return this;
19268         if (other.hasNamespaceName()) {
19269           bitField0_ |= 0x00000001;
19270           namespaceName_ = other.namespaceName_;
19271           onChanged();
19272         }
19273         this.mergeUnknownFields(other.getUnknownFields());
19274         return this;
19275       }
19276 
isInitialized()19277       public final boolean isInitialized() {
19278         if (!hasNamespaceName()) {
19279 
19280           return false;
19281         }
19282         return true;
19283       }
19284 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19285       public Builder mergeFrom(
19286           com.google.protobuf.CodedInputStream input,
19287           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19288           throws java.io.IOException {
19289         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest parsedMessage = null;
19290         try {
19291           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
19292         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19293           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest) e.getUnfinishedMessage();
19294           throw e;
19295         } finally {
19296           if (parsedMessage != null) {
19297             mergeFrom(parsedMessage);
19298           }
19299         }
19300         return this;
19301       }
19302       private int bitField0_;
19303 
19304       // required string namespaceName = 1;
19305       private java.lang.Object namespaceName_ = "";
19306       /**
19307        * <code>required string namespaceName = 1;</code>
19308        */
hasNamespaceName()19309       public boolean hasNamespaceName() {
19310         return ((bitField0_ & 0x00000001) == 0x00000001);
19311       }
19312       /**
19313        * <code>required string namespaceName = 1;</code>
19314        */
getNamespaceName()19315       public java.lang.String getNamespaceName() {
19316         java.lang.Object ref = namespaceName_;
19317         if (!(ref instanceof java.lang.String)) {
19318           java.lang.String s = ((com.google.protobuf.ByteString) ref)
19319               .toStringUtf8();
19320           namespaceName_ = s;
19321           return s;
19322         } else {
19323           return (java.lang.String) ref;
19324         }
19325       }
19326       /**
19327        * <code>required string namespaceName = 1;</code>
19328        */
19329       public com.google.protobuf.ByteString
getNamespaceNameBytes()19330           getNamespaceNameBytes() {
19331         java.lang.Object ref = namespaceName_;
19332         if (ref instanceof String) {
19333           com.google.protobuf.ByteString b =
19334               com.google.protobuf.ByteString.copyFromUtf8(
19335                   (java.lang.String) ref);
19336           namespaceName_ = b;
19337           return b;
19338         } else {
19339           return (com.google.protobuf.ByteString) ref;
19340         }
19341       }
19342       /**
19343        * <code>required string namespaceName = 1;</code>
19344        */
setNamespaceName( java.lang.String value)19345       public Builder setNamespaceName(
19346           java.lang.String value) {
19347         if (value == null) {
19348     throw new NullPointerException();
19349   }
19350   bitField0_ |= 0x00000001;
19351         namespaceName_ = value;
19352         onChanged();
19353         return this;
19354       }
19355       /**
19356        * <code>required string namespaceName = 1;</code>
19357        */
clearNamespaceName()19358       public Builder clearNamespaceName() {
19359         bitField0_ = (bitField0_ & ~0x00000001);
19360         namespaceName_ = getDefaultInstance().getNamespaceName();
19361         onChanged();
19362         return this;
19363       }
19364       /**
19365        * <code>required string namespaceName = 1;</code>
19366        */
setNamespaceNameBytes( com.google.protobuf.ByteString value)19367       public Builder setNamespaceNameBytes(
19368           com.google.protobuf.ByteString value) {
19369         if (value == null) {
19370     throw new NullPointerException();
19371   }
19372   bitField0_ |= 0x00000001;
19373         namespaceName_ = value;
19374         onChanged();
19375         return this;
19376       }
19377 
19378       // @@protoc_insertion_point(builder_scope:GetNamespaceDescriptorRequest)
19379     }
19380 
    // Eagerly creates the shared default (empty) instance used by
    // getDefaultInstance() and merge/equality checks.
    static {
      defaultInstance = new GetNamespaceDescriptorRequest(true);
      defaultInstance.initFields();
    }
19385 
19386     // @@protoc_insertion_point(class_scope:GetNamespaceDescriptorRequest)
19387   }
19388 
  // Read-only accessor interface implemented by both the
  // GetNamespaceDescriptorResponse message and its Builder.
  public interface GetNamespaceDescriptorResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .NamespaceDescriptor namespaceDescriptor = 1;
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    boolean hasNamespaceDescriptor();
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor();
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder();
  }
19406   /**
19407    * Protobuf type {@code GetNamespaceDescriptorResponse}
19408    */
19409   public static final class GetNamespaceDescriptorResponse extends
19410       com.google.protobuf.GeneratedMessage
19411       implements GetNamespaceDescriptorResponseOrBuilder {
    // Use GetNamespaceDescriptorResponse.newBuilder() to construct.
    private GetNamespaceDescriptorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the shared default instance; the noInit flag is unused
    // and only disambiguates the overload.
    private GetNamespaceDescriptorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetNamespaceDescriptorResponse defaultInstance;
    public static GetNamespaceDescriptorResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetNamespaceDescriptorResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields present on the wire but absent from the compiled schema,
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetNamespaceDescriptorResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19434     private GetNamespaceDescriptorResponse(
19435         com.google.protobuf.CodedInputStream input,
19436         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19437         throws com.google.protobuf.InvalidProtocolBufferException {
19438       initFields();
19439       int mutable_bitField0_ = 0;
19440       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
19441           com.google.protobuf.UnknownFieldSet.newBuilder();
19442       try {
19443         boolean done = false;
19444         while (!done) {
19445           int tag = input.readTag();
19446           switch (tag) {
19447             case 0:
19448               done = true;
19449               break;
19450             default: {
19451               if (!parseUnknownField(input, unknownFields,
19452                                      extensionRegistry, tag)) {
19453                 done = true;
19454               }
19455               break;
19456             }
19457             case 10: {
19458               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder subBuilder = null;
19459               if (((bitField0_ & 0x00000001) == 0x00000001)) {
19460                 subBuilder = namespaceDescriptor_.toBuilder();
19461               }
19462               namespaceDescriptor_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry);
19463               if (subBuilder != null) {
19464                 subBuilder.mergeFrom(namespaceDescriptor_);
19465                 namespaceDescriptor_ = subBuilder.buildPartial();
19466               }
19467               bitField0_ |= 0x00000001;
19468               break;
19469             }
19470           }
19471         }
19472       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19473         throw e.setUnfinishedMessage(this);
19474       } catch (java.io.IOException e) {
19475         throw new com.google.protobuf.InvalidProtocolBufferException(
19476             e.getMessage()).setUnfinishedMessage(this);
19477       } finally {
19478         this.unknownFields = unknownFields.build();
19479         makeExtensionsImmutable();
19480       }
19481     }
    // Descriptor and reflection plumbing wired to the tables initialized at the
    // bottom of MasterProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.Builder.class);
    }

    // NOTE(review): PARSER is public static and non-final, as emitted by this
    // protobuf generator version; callers should treat it as read-only.
    public static com.google.protobuf.Parser<GetNamespaceDescriptorResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetNamespaceDescriptorResponse>() {
      public GetNamespaceDescriptorResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetNamespaceDescriptorResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetNamespaceDescriptorResponse> getParserForType() {
      return PARSER;
    }
19508 
    // Presence bits; bit 0x1 = namespaceDescriptor is set.
    private int bitField0_;
    // required .NamespaceDescriptor namespaceDescriptor = 1;
    public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_;
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public boolean hasNamespaceDescriptor() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     *
     * <p>Returns the default NamespaceDescriptor instance when unset (see
     * initFields), never null.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() {
      return namespaceDescriptor_;
    }
    /**
     * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() {
      return namespaceDescriptor_;
    }

    // Resets the field to its default; called from constructors.
    private void initFields() {
      namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
    }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // namespaceDescriptor is required, and must itself be fully initialized.
      if (!hasNamespaceDescriptor()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getNamespaceDescriptor().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
19551 
writeTo(com.google.protobuf.CodedOutputStream output)19552     public void writeTo(com.google.protobuf.CodedOutputStream output)
19553                         throws java.io.IOException {
19554       getSerializedSize();
19555       if (((bitField0_ & 0x00000001) == 0x00000001)) {
19556         output.writeMessage(1, namespaceDescriptor_);
19557       }
19558       getUnknownFields().writeTo(output);
19559     }
19560 
19561     private int memoizedSerializedSize = -1;
getSerializedSize()19562     public int getSerializedSize() {
19563       int size = memoizedSerializedSize;
19564       if (size != -1) return size;
19565 
19566       size = 0;
19567       if (((bitField0_ & 0x00000001) == 0x00000001)) {
19568         size += com.google.protobuf.CodedOutputStream
19569           .computeMessageSize(1, namespaceDescriptor_);
19570       }
19571       size += getUnknownFields().getSerializedSize();
19572       memoizedSerializedSize = size;
19573       return size;
19574     }
19575 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook: delegates to GeneratedMessage so the object is
    // serialized via the protobuf wire format.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
19582 
    // Value equality: same field presence, same namespaceDescriptor, same
    // unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) obj;

      boolean result = true;
      result = result && (hasNamespaceDescriptor() == other.hasNamespaceDescriptor());
      if (hasNamespaceDescriptor()) {
        result = result && getNamespaceDescriptor()
            .equals(other.getNamespaceDescriptor());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 is the "not computed" sentinel, so a message whose hash
    // happens to be 0 is recomputed on every call (benign).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNamespaceDescriptor()) {
        hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceDescriptor().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
19620 
parseFrom( com.google.protobuf.ByteString data)19621     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(
19622         com.google.protobuf.ByteString data)
19623         throws com.google.protobuf.InvalidProtocolBufferException {
19624       return PARSER.parseFrom(data);
19625     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19626     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(
19627         com.google.protobuf.ByteString data,
19628         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19629         throws com.google.protobuf.InvalidProtocolBufferException {
19630       return PARSER.parseFrom(data, extensionRegistry);
19631     }
parseFrom(byte[] data)19632     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(byte[] data)
19633         throws com.google.protobuf.InvalidProtocolBufferException {
19634       return PARSER.parseFrom(data);
19635     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19636     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(
19637         byte[] data,
19638         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19639         throws com.google.protobuf.InvalidProtocolBufferException {
19640       return PARSER.parseFrom(data, extensionRegistry);
19641     }
parseFrom(java.io.InputStream input)19642     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(java.io.InputStream input)
19643         throws java.io.IOException {
19644       return PARSER.parseFrom(input);
19645     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19646     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(
19647         java.io.InputStream input,
19648         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19649         throws java.io.IOException {
19650       return PARSER.parseFrom(input, extensionRegistry);
19651     }
parseDelimitedFrom(java.io.InputStream input)19652     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseDelimitedFrom(java.io.InputStream input)
19653         throws java.io.IOException {
19654       return PARSER.parseDelimitedFrom(input);
19655     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19656     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseDelimitedFrom(
19657         java.io.InputStream input,
19658         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19659         throws java.io.IOException {
19660       return PARSER.parseDelimitedFrom(input, extensionRegistry);
19661     }
parseFrom( com.google.protobuf.CodedInputStream input)19662     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(
19663         com.google.protobuf.CodedInputStream input)
19664         throws java.io.IOException {
19665       return PARSER.parseFrom(input);
19666     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19667     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parseFrom(
19668         com.google.protobuf.CodedInputStream input,
19669         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19670         throws java.io.IOException {
19671       return PARSER.parseFrom(input, extensionRegistry);
19672     }
19673 
    // Builder factories: an empty builder, or one seeded from an existing
    // message via mergeFrom (used by toBuilder()).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Called by the protobuf runtime to create a builder wired to a parent
    // builder so field changes propagate upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetNamespaceDescriptorResponse}
     *
     * Mutable builder for the response message.  The single field,
     * {@code required .NamespaceDescriptor namespaceDescriptor = 1}, is held
     * either directly in {@code namespaceDescriptor_} or, once a nested
     * builder has been requested, in {@code namespaceDescriptorBuilder_}
     * (exactly one of the two is active at any time).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponseOrBuilder {
      // Message descriptor, shared with the enclosing generated class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_descriptor;
      }

      // Binds the generated accessors to descriptor fields via reflection.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Used when this builder is created as a child of another builder.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested field builder when the runtime requires
      // it (alwaysUseFieldBuilders is set when parent notification is needed).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getNamespaceDescriptorFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets the field to its default and clears its has-bit.
      public Builder clear() {
        super.clear();
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
        } else {
          namespaceDescriptorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      // Deep copy via a buildPartial/mergeFrom round trip.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetNamespaceDescriptorResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Transfers the has-bit and field value (from whichever of the plain
      // field / nested builder is active) into a new message instance.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (namespaceDescriptorBuilder_ == null) {
          result.namespaceDescriptor_ = namespaceDescriptor_;
        } else {
          result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dispatches to the typed mergeFrom when possible; otherwise falls back
      // to the reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges set fields and unknown fields from another message of the
      // same type; the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance()) return this;
        if (other.hasNamespaceDescriptor()) {
          mergeNamespaceDescriptor(other.getNamespaceDescriptor());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // The required namespaceDescriptor field must be set and itself be
      // fully initialized for this builder to build() successfully.
      public final boolean isInitialized() {
        if (!hasNamespaceDescriptor()) {

          return false;
        }
        if (!getNamespaceDescriptor().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from the wire and merges the result into this builder.  On
      // parse failure the partially parsed message (attached to the
      // exception) is still merged in before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0x00000001 tracks whether namespaceDescriptor is set.
      private int bitField0_;

      // required .NamespaceDescriptor namespaceDescriptor = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_;
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public boolean hasNamespaceDescriptor() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor() {
        if (namespaceDescriptorBuilder_ == null) {
          return namespaceDescriptor_;
        } else {
          return namespaceDescriptorBuilder_.getMessage();
        }
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * Rejects null; sets the has-bit.
       */
      public Builder setNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
        if (namespaceDescriptorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          namespaceDescriptor_ = value;
          onChanged();
        } else {
          namespaceDescriptorBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * Convenience overload that builds the supplied sub-builder.
       */
      public Builder setNamespaceDescriptor(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) {
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptor_ = builderForValue.build();
          onChanged();
        } else {
          namespaceDescriptorBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * If the field is already set to a non-default value, merges
       * {@code value} into it; otherwise replaces it outright.
       */
      public Builder mergeNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
        if (namespaceDescriptorBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              namespaceDescriptor_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance()) {
            namespaceDescriptor_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.newBuilder(namespaceDescriptor_).mergeFrom(value).buildPartial();
          } else {
            namespaceDescriptor_ = value;
          }
          onChanged();
        } else {
          namespaceDescriptorBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * Resets the field to its default and clears the has-bit.
       */
      public Builder clearNamespaceDescriptor() {
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptor_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance();
          onChanged();
        } else {
          namespaceDescriptorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * Marks the field as set and returns a mutable sub-builder for it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNamespaceDescriptorFieldBuilder().getBuilder();
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder() {
        if (namespaceDescriptorBuilder_ != null) {
          return namespaceDescriptorBuilder_.getMessageOrBuilder();
        } else {
          return namespaceDescriptor_;
        }
      }
      /**
       * <code>required .NamespaceDescriptor namespaceDescriptor = 1;</code>
       *
       * Lazily creates the nested field builder; after creation the plain
       * field is nulled out so the builder becomes the single owner.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
          getNamespaceDescriptorFieldBuilder() {
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>(
                  namespaceDescriptor_,
                  getParentForChildren(),
                  isClean());
          namespaceDescriptor_ = null;
        }
        return namespaceDescriptorBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:GetNamespaceDescriptorResponse)
    }
19941 
    static {
      // Eagerly create and initialize the shared default (empty) instance.
      defaultInstance = new GetNamespaceDescriptorResponse(true);
      defaultInstance.initFields();
    }
19946 
19947     // @@protoc_insertion_point(class_scope:GetNamespaceDescriptorResponse)
19948   }
19949 
  /**
   * Common read-only view over {@code ListNamespaceDescriptorsRequest} and
   * its Builder.  The message declares no fields, so this interface adds
   * nothing beyond {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface ListNamespaceDescriptorsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
19953   /**
19954    * Protobuf type {@code ListNamespaceDescriptorsRequest}
19955    */
19956   public static final class ListNamespaceDescriptorsRequest extends
19957       com.google.protobuf.GeneratedMessage
19958       implements ListNamespaceDescriptorsRequestOrBuilder {
19959     // Use ListNamespaceDescriptorsRequest.newBuilder() to construct.
    // Builder-based constructor; copies the builder's unknown fields.
    private ListNamespaceDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructs the singleton default instance with an empty unknown-field set.
    private ListNamespaceDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance, assigned in the static initializer.
    private static final ListNamespaceDescriptorsRequest defaultInstance;
    public static ListNamespaceDescriptorsRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ListNamespaceDescriptorsRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not part of this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor.  This message declares no fields, so
    // every non-zero tag is routed into the unknown-field set.
    private ListNamespaceDescriptorsRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze whatever was read even on failure, so the partially parsed
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Message descriptor, shared with the enclosing generated class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_descriptor;
    }

    // Binds the generated accessors to descriptor fields via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.Builder.class);
    }

    // Parser singleton used by all static parseFrom overloads; delegates to
    // the wire-parsing constructor.
    public static com.google.protobuf.Parser<ListNamespaceDescriptorsRequest> PARSER =
        new com.google.protobuf.AbstractParser<ListNamespaceDescriptorsRequest>() {
      public ListNamespaceDescriptorsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListNamespaceDescriptorsRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ListNamespaceDescriptorsRequest> getParserForType() {
      return PARSER;
    }
20041 
    // No fields to initialize: this is an empty request message.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes this message; only the unknown-field set can carry data.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
20069 
    private static final long serialVersionUID = 0L;
    // Java serialization is delegated to GeneratedMessage's writeReplace,
    // which substitutes a protobuf-based serialized form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
20076 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) obj;

      // With no declared fields, equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
20105 
    // -------------------------------------------------------------------
    // Static parse entry points for ListNamespaceDescriptorsRequest.
    // Every overload delegates directly to the PARSER singleton; they
    // differ only in input source and whether an extension registry is
    // supplied.
    // -------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants delegate to PARSER.parseDelimitedFrom, which reads
    // a length-prefixed message so several messages can share one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
20158 
    // Builder factories: an empty builder, or one seeded from an existing
    // message via mergeFrom (used by toBuilder()).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Called by the protobuf runtime to create a builder wired to a parent
    // builder so field changes propagate upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
20172     /**
20173      * Protobuf type {@code ListNamespaceDescriptorsRequest}
20174      */
20175     public static final class Builder extends
20176         com.google.protobuf.GeneratedMessage.Builder<Builder>
20177        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequestOrBuilder {
      // Message descriptor, shared with the enclosing generated class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_descriptor;
      }

      // Binds the generated accessors to descriptor fields via reflection.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Used when this builder is created as a child of another builder.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-type fields here, so there are no nested builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
20207 
      // Nothing message-specific to reset; defers to the superclass.
      public Builder clear() {
        super.clear();
        return this;
      }

      // Deep copy via a buildPartial/mergeFrom round trip.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields
      // (none exist here, so the check always passes).
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // No fields to transfer; simply constructs the message from this builder.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest(this);
        onBuilt();
        return result;
      }
20239 
      // Dispatch: use the typed merge when the argument is the same message
      // type, otherwise fall back to reflection-based merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: no declared fields, so only unknown fields are merged.
      // Merging the default instance is a no-op shortcut.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // No required fields, so the builder is always initialized.
      public final boolean isInitialized() {
        return true;
      }
20258 
      // Parses a message from the stream and merges it into this builder.
      // On a parse error, whatever was successfully parsed is still merged
      // in (via the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
20276 
20277       // @@protoc_insertion_point(builder_scope:ListNamespaceDescriptorsRequest)
20278     }
20279 
    // Eagerly builds the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new ListNamespaceDescriptorsRequest(true);
      defaultInstance.initFields();
    }
20284 
20285     // @@protoc_insertion_point(class_scope:ListNamespaceDescriptorsRequest)
20286   }
20287 
  /**
   * Accessor contract shared by {@code ListNamespaceDescriptorsResponse} and
   * its {@code Builder} for the repeated {@code namespaceDescriptor} field.
   */
  public interface ListNamespaceDescriptorsResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .NamespaceDescriptor namespaceDescriptor = 1;
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor>
        getNamespaceDescriptorList();
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(int index);
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    int getNamespaceDescriptorCount();
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
        getNamespaceDescriptorOrBuilderList();
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(
        int index);
  }
20316   /**
20317    * Protobuf type {@code ListNamespaceDescriptorsResponse}
20318    */
20319   public static final class ListNamespaceDescriptorsResponse extends
20320       com.google.protobuf.GeneratedMessage
20321       implements ListNamespaceDescriptorsResponseOrBuilder {
    // Use ListNamespaceDescriptorsResponse.newBuilder() to construct.
    private ListNamespaceDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance; skips builder state.
    private ListNamespaceDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance (assigned in the class static initializer).
    private static final ListNamespaceDescriptorsResponse defaultInstance;
    public static ListNamespaceDescriptorsResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ListNamespaceDescriptorsResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read off the wire that are not part of this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked only through PARSER.
    private ListNamespaceDescriptorsResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of the stream/message.
              done = true;
              break;
            default: {
              // Unrecognized tag: preserve it in unknownFields; a false
              // return (e.g. an end-group tag) stops parsing.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1 (namespaceDescriptor), wire type 2.
              // Lazily allocate the mutable list on the first element;
              // mutable_bitField0_ bit 0 records that allocation happened.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                namespaceDescriptor_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor>();
                mutable_bitField0_ |= 0x00000001;
              }
              namespaceDescriptor_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal state even on error so the unfinished message attached to a
        // thrown exception is internally consistent and immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          namespaceDescriptor_ = java.util.Collections.unmodifiableList(namespaceDescriptor_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_descriptor;
    }

    // Maps the descriptor onto this generated class for reflective access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.Builder.class);
    }

    // Stream parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ListNamespaceDescriptorsResponse> PARSER =
        new com.google.protobuf.AbstractParser<ListNamespaceDescriptorsResponse>() {
      public ListNamespaceDescriptorsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListNamespaceDescriptorsResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ListNamespaceDescriptorsResponse> getParserForType() {
      return PARSER;
    }
20416 
    // repeated .NamespaceDescriptor namespaceDescriptor = 1;
    public static final int NAMESPACEDESCRIPTOR_FIELD_NUMBER = 1;
    // Immutable after construction (unmodifiable list or emptyList()).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> namespaceDescriptor_;
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> getNamespaceDescriptorList() {
      return namespaceDescriptor_;
    }
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
        getNamespaceDescriptorOrBuilderList() {
      return namespaceDescriptor_;
    }
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public int getNamespaceDescriptorCount() {
      return namespaceDescriptor_.size();
    }
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(int index) {
      return namespaceDescriptor_.get(index);
    }
    /**
     * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(
        int index) {
      return namespaceDescriptor_.get(index);
    }

    // Sets every field to its default; called before wire parsing and for
    // the singleton default instance.
    private void initFields() {
      namespaceDescriptor_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Initialized iff every nested NamespaceDescriptor is initialized.
      for (int i = 0; i < getNamespaceDescriptorCount(); i++) {
        if (!getNamespaceDescriptor(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
20470 
    // Serializes to the wire format; getSerializedSize() is called first so
    // memoized sizes are computed before any nested message is written.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < namespaceDescriptor_.size(); i++) {
        output.writeMessage(1, namespaceDescriptor_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < namespaceDescriptor_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, namespaceDescriptor_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
20494 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
20501 
    // Value equality over the repeated field plus unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) obj;

      boolean result = true;
      result = result && getNamespaceDescriptorList()
          .equals(other.getNamespaceDescriptorList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash consistent with equals(); 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getNamespaceDescriptorCount() > 0) {
        hash = (37 * hash) + NAMESPACEDESCRIPTOR_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceDescriptorList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
20536 
    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
20589 
    // Builder factories; newBuilder(prototype) pre-populates from prototype.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
20603     /**
20604      * Protobuf type {@code ListNamespaceDescriptorsResponse}
20605      */
20606     public static final class Builder extends
20607         com.google.protobuf.GeneratedMessage.Builder<Builder>
20608        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_descriptor;
      }

      // Maps the descriptor onto this generated class for reflective access.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder when the runtime requires
      // builders for all fields (nested-builder mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getNamespaceDescriptorFieldBuilder();
        }
      }
      // Private factory used by newBuilder() and clone().
      private static Builder create() {
        return new Builder();
      }
20639 
      // Resets the repeated field, either directly on the plain list or via
      // the field builder when one is in use.
      public Builder clear() {
        super.clear();
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptor_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          namespaceDescriptorBuilder_.clear();
        }
        return this;
      }

      // Deep copy: a fresh builder seeded with this builder's current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListNamespaceDescriptorsResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
      }
20663 
      // Builds the message, throwing if any nested descriptor is missing
      // its required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the initialization check. When no field builder is in
      // use, the backing list is frozen (made unmodifiable) and handed to the
      // message; the bit is cleared so later mutations re-copy the list.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse(this);
        int from_bitField0_ = bitField0_;
        if (namespaceDescriptorBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            namespaceDescriptor_ = java.util.Collections.unmodifiableList(namespaceDescriptor_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.namespaceDescriptor_ = namespaceDescriptor_;
        } else {
          result.namespaceDescriptor_ = namespaceDescriptorBuilder_.build();
        }
        onBuilt();
        return result;
      }
20687 
      // Dispatch: use the typed merge when the argument is the same message
      // type, otherwise fall back to reflection-based merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Appends other's namespaceDescriptor elements to this builder's.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance()) return this;
        if (namespaceDescriptorBuilder_ == null) {
          // Plain-list mode: share other's immutable list when ours is empty
          // (bit cleared so a later mutation copies it), else copy-append.
          if (!other.namespaceDescriptor_.isEmpty()) {
            if (namespaceDescriptor_.isEmpty()) {
              namespaceDescriptor_ = other.namespaceDescriptor_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureNamespaceDescriptorIsMutable();
              namespaceDescriptor_.addAll(other.namespaceDescriptor_);
            }
            onChanged();
          }
        } else {
          // Field-builder mode: if the builder is empty, discard it and adopt
          // other's list directly (re-creating the builder only when the
          // runtime forces nested builders); otherwise append via the builder.
          if (!other.namespaceDescriptor_.isEmpty()) {
            if (namespaceDescriptorBuilder_.isEmpty()) {
              namespaceDescriptorBuilder_.dispose();
              namespaceDescriptorBuilder_ = null;
              namespaceDescriptor_ = other.namespaceDescriptor_;
              bitField0_ = (bitField0_ & ~0x00000001);
              namespaceDescriptorBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getNamespaceDescriptorFieldBuilder() : null;
            } else {
              namespaceDescriptorBuilder_.addAllMessages(other.namespaceDescriptor_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
20728 
      // Initialized iff every nested NamespaceDescriptor is initialized.
      public final boolean isInitialized() {
        for (int i = 0; i < getNamespaceDescriptorCount(); i++) {
          if (!getNamespaceDescriptor(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }
20738 
      // Parses a message from the stream and merges it into this builder.
      // On a parse error, whatever was successfully parsed is still merged
      // in (via the finally block) before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 tracks whether namespaceDescriptor_ is a private mutable copy.
      private int bitField0_;

      // repeated .NamespaceDescriptor namespaceDescriptor = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> namespaceDescriptor_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: before any in-place mutation, replace a shared
      // or immutable list with a private ArrayList copy and set bit 0.
      private void ensureNamespaceDescriptorIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          namespaceDescriptor_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor>(namespaceDescriptor_);
          bitField0_ |= 0x00000001;
         }
      }

      // Non-null once nested-builder mode is active; then it, not
      // namespaceDescriptor_, is the source of truth for the field.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder> namespaceDescriptorBuilder_;
20770 
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> getNamespaceDescriptorList() {
        // Wrap the plain list so callers cannot mutate builder state.
        if (namespaceDescriptorBuilder_ == null) {
          return java.util.Collections.unmodifiableList(namespaceDescriptor_);
        } else {
          return namespaceDescriptorBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public int getNamespaceDescriptorCount() {
        if (namespaceDescriptorBuilder_ == null) {
          return namespaceDescriptor_.size();
        } else {
          return namespaceDescriptorBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor getNamespaceDescriptor(int index) {
        if (namespaceDescriptorBuilder_ == null) {
          return namespaceDescriptor_.get(index);
        } else {
          return namespaceDescriptorBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public Builder setNamespaceDescriptor(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
        // Plain-list mode rejects null and copies-on-write before mutating;
        // builder mode delegates (it performs its own null check).
        if (namespaceDescriptorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNamespaceDescriptorIsMutable();
          namespaceDescriptor_.set(index, value);
          onChanged();
        } else {
          namespaceDescriptorBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public Builder setNamespaceDescriptor(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) {
        if (namespaceDescriptorBuilder_ == null) {
          ensureNamespaceDescriptorIsMutable();
          namespaceDescriptor_.set(index, builderForValue.build());
          onChanged();
        } else {
          namespaceDescriptorBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public Builder addNamespaceDescriptor(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
        if (namespaceDescriptorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNamespaceDescriptorIsMutable();
          namespaceDescriptor_.add(value);
          onChanged();
        } else {
          namespaceDescriptorBuilder_.addMessage(value);
        }
        return this;
      }
20848       /**
20849        * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
20850        */
addNamespaceDescriptor( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value)20851       public Builder addNamespaceDescriptor(
20852           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor value) {
20853         if (namespaceDescriptorBuilder_ == null) {
20854           if (value == null) {
20855             throw new NullPointerException();
20856           }
20857           ensureNamespaceDescriptorIsMutable();
20858           namespaceDescriptor_.add(index, value);
20859           onChanged();
20860         } else {
20861           namespaceDescriptorBuilder_.addMessage(index, value);
20862         }
20863         return this;
20864       }
20865       /**
20866        * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
20867        */
addNamespaceDescriptor( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue)20868       public Builder addNamespaceDescriptor(
20869           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) {
20870         if (namespaceDescriptorBuilder_ == null) {
20871           ensureNamespaceDescriptorIsMutable();
20872           namespaceDescriptor_.add(builderForValue.build());
20873           onChanged();
20874         } else {
20875           namespaceDescriptorBuilder_.addMessage(builderForValue.build());
20876         }
20877         return this;
20878       }
20879       /**
20880        * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
20881        */
addNamespaceDescriptor( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue)20882       public Builder addNamespaceDescriptor(
20883           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder builderForValue) {
20884         if (namespaceDescriptorBuilder_ == null) {
20885           ensureNamespaceDescriptorIsMutable();
20886           namespaceDescriptor_.add(index, builderForValue.build());
20887           onChanged();
20888         } else {
20889           namespaceDescriptorBuilder_.addMessage(index, builderForValue.build());
20890         }
20891         return this;
20892       }
20893       /**
20894        * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
20895        */
addAllNamespaceDescriptor( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> values)20896       public Builder addAllNamespaceDescriptor(
20897           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor> values) {
20898         if (namespaceDescriptorBuilder_ == null) {
20899           ensureNamespaceDescriptorIsMutable();
20900           super.addAll(values, namespaceDescriptor_);
20901           onChanged();
20902         } else {
20903           namespaceDescriptorBuilder_.addAllMessages(values);
20904         }
20905         return this;
20906       }
20907       /**
20908        * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
20909        */
      public Builder clearNamespaceDescriptor() {
        // Reset the repeated field to empty and drop the has-elements bit
        // (bit 0 of bitField0_ tracks whether the list was ever made mutable).
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptor_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          namespaceDescriptorBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public Builder removeNamespaceDescriptor(int index) {
        // Remove the element at index from whichever store is live.
        if (namespaceDescriptorBuilder_ == null) {
          ensureNamespaceDescriptorIsMutable();
          namespaceDescriptor_.remove(index);
          onChanged();
        } else {
          namespaceDescriptorBuilder_.remove(index);
        }
        return this;
      }
20933       /**
20934        * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
20935        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder getNamespaceDescriptorBuilder(
          int index) {
        // Mutable sub-builder view; forces creation of the RepeatedFieldBuilder,
        // switching this Builder permanently into the builder-backed state.
        return getNamespaceDescriptorFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder getNamespaceDescriptorOrBuilder(
          int index) {
        // Read-only view that does NOT force builder creation.
        if (namespaceDescriptorBuilder_ == null) {
          return namespaceDescriptor_.get(index);  } else {
          return namespaceDescriptorBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
           getNamespaceDescriptorOrBuilderList() {
        // Read-only list view over messages and/or sub-builders.
        if (namespaceDescriptorBuilder_ != null) {
          return namespaceDescriptorBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(namespaceDescriptor_);
        }
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder addNamespaceDescriptorBuilder() {
        // Append a fresh element initialized to the type's default instance and
        // return its builder for in-place population.
        return getNamespaceDescriptorFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder addNamespaceDescriptorBuilder(
          int index) {
        // Insert a fresh default-initialized element at index and return its builder.
        return getNamespaceDescriptorFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.getDefaultInstance());
      }
      /**
       * <code>repeated .NamespaceDescriptor namespaceDescriptor = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder>
           getNamespaceDescriptorBuilderList() {
        return getNamespaceDescriptorFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>
          getNamespaceDescriptorFieldBuilder() {
        // Lazy one-way migration from the list-backed state: the existing list is
        // handed to the RepeatedFieldBuilder and then nulled out so there is a
        // single source of truth from here on.
        if (namespaceDescriptorBuilder_ == null) {
          namespaceDescriptorBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NamespaceDescriptorOrBuilder>(
                  namespaceDescriptor_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          namespaceDescriptor_ = null;
        }
        return namespaceDescriptorBuilder_;
      }
20997 
20998       // @@protoc_insertion_point(builder_scope:ListNamespaceDescriptorsResponse)
20999     }
21000 
    // Eagerly build the singleton default instance; the noInit constructor skips
    // field initialization, which initFields() then performs explicitly.
    static {
      defaultInstance = new ListNamespaceDescriptorsResponse(true);
      defaultInstance.initFields();
    }
21005 
21006     // @@protoc_insertion_point(class_scope:ListNamespaceDescriptorsResponse)
21007   }
21008 
  /**
   * Read-only accessor contract shared by the
   * {@code ListTableDescriptorsByNamespaceRequest} message and its Builder.
   */
  public interface ListTableDescriptorsByNamespaceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string namespaceName = 1;
    /**
     * <code>required string namespaceName = 1;</code>
     */
    boolean hasNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     */
    java.lang.String getNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * Raw UTF-8 bytes of the name, avoiding a String round-trip.
     */
    com.google.protobuf.ByteString
        getNamespaceNameBytes();
  }
21027   /**
21028    * Protobuf type {@code ListTableDescriptorsByNamespaceRequest}
21029    */
21030   public static final class ListTableDescriptorsByNamespaceRequest extends
21031       com.google.protobuf.GeneratedMessage
21032       implements ListTableDescriptorsByNamespaceRequestOrBuilder {
    // Use ListTableDescriptorsByNamespaceRequest.newBuilder() to construct.
    private ListTableDescriptorsByNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path used only for the static default instance; fields are set
    // afterwards via initFields().
    private ListTableDescriptorsByNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ListTableDescriptorsByNamespaceRequest defaultInstance;
    public static ListTableDescriptorsByNamespaceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ListTableDescriptorsByNamespaceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields from the wire that this schema version does not recognize; kept so
    // re-serialization preserves them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tags from {@code input} until
     * end-of-message (tag 0), storing field 1 and routing everything else to the
     * unknown-field set. Note the {@code default} arm textually precedes
     * {@code case 10}; switch dispatch is by tag value, so the ordering is
     * immaterial — this is the generator's standard layout.
     */
    private ListTableDescriptorsByNamespaceRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unrecognized tag: preserve it; a false return means end of group.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1, wire type 2 (length-delimited): namespaceName.
              bitField0_ |= 0x00000001;
              namespaceName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always seal what was parsed, even on error, so the partially-built
        // message attached to the exception is immutable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
21095     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()21096         getDescriptor() {
21097       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_descriptor;
21098     }
21099 
21100     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()21101         internalGetFieldAccessorTable() {
21102       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable
21103           .ensureFieldAccessorsInitialized(
21104               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.Builder.class);
21105     }
21106 
21107     public static com.google.protobuf.Parser<ListTableDescriptorsByNamespaceRequest> PARSER =
21108         new com.google.protobuf.AbstractParser<ListTableDescriptorsByNamespaceRequest>() {
21109       public ListTableDescriptorsByNamespaceRequest parsePartialFrom(
21110           com.google.protobuf.CodedInputStream input,
21111           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21112           throws com.google.protobuf.InvalidProtocolBufferException {
21113         return new ListTableDescriptorsByNamespaceRequest(input, extensionRegistry);
21114       }
21115     };
21116 
21117     @java.lang.Override
getParserForType()21118     public com.google.protobuf.Parser<ListTableDescriptorsByNamespaceRequest> getParserForType() {
21119       return PARSER;
21120     }
21121 
    private int bitField0_;
    // required string namespaceName = 1;
    public static final int NAMESPACENAME_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; lazily converted in each direction
    // and cached, so repeated calls do not re-decode.
    private java.lang.Object namespaceName_;
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public boolean hasNamespaceName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public java.lang.String getNamespaceName() {
      java.lang.Object ref = namespaceName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the decoded String when the bytes were valid UTF-8;
        // otherwise keep the original bytes so they round-trip unchanged.
        if (bs.isValidUtf8()) {
          namespaceName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public com.google.protobuf.ByteString
        getNamespaceNameBytes() {
      java.lang.Object ref = namespaceName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form for subsequent byte-level access.
        namespaceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
21165 
    private void initFields() {
      namespaceName_ = "";
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // namespaceName is a required field; its absence fails initialization.
      if (!hasNamespaceName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called first so memoizedSerializedSize is populated before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNamespaceNameBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      // Memoized: safe because the message is immutable after construction.
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNamespaceNameBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
21205 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is delegated to GeneratedMessage's replacement object.
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) obj;

      // Equality compares field presence, field value, and unknown fields.
      boolean result = true;
      result = result && (hasNamespaceName() == other.hasNamespaceName());
      if (hasNamespaceName()) {
        result = result && getNamespaceName()
            .equals(other.getNamespaceName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      // Memoized; 0 doubles as the "not yet computed" sentinel.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNamespaceName()) {
        hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER;
        hash = (53 * hash) + getNamespaceName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
21250 
    // Convenience parse entry points; all delegate to the shared PARSER, which
    // enforces required-field initialization (parseFrom, unlike parsePartialFrom).
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
21303 
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Builder pre-populated with this prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used for nested-message change propagation.
      Builder builder = new Builder(parent);
      return builder;
    }
21317     /**
21318      * Protobuf type {@code ListTableDescriptorsByNamespaceRequest}
21319      */
21320     public static final class Builder extends
21321         com.google.protobuf.GeneratedMessage.Builder<Builder>
21322        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields here, so nothing to eagerly initialize even
        // when alwaysUseFieldBuilders is set.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
21352 
      public Builder clear() {
        super.clear();
        // Reset field 1 to its default and clear its presence bit.
        namespaceName_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest build() {
        // Unlike buildPartial(), build() rejects messages missing required fields.
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest(this);
        // Copy the builder's presence bits into the message's bit field.
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.namespaceName_ = namespaceName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
21393 
      public Builder mergeFrom(com.google.protobuf.Message other) {
        // Fast path for same-type merge; otherwise fall back to reflective merge.
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest other) {
        // Merging the default instance is a no-op by protobuf convention.
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance()) return this;
        if (other.hasNamespaceName()) {
          bitField0_ |= 0x00000001;
          // Share the String/ByteString object directly; both types are immutable.
          namespaceName_ = other.namespaceName_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // namespaceName is required.
        if (!hasNamespaceName()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover whatever was parsed before the failure so the finally
          // block can still merge it, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string namespaceName = 1;
      // Holds either a java.lang.String or a ByteString; lazily converted to
      // String on first access (see getNamespaceName()).
      private java.lang.Object namespaceName_ = "";
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public boolean hasNamespaceName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public java.lang.String getNamespaceName() {
        java.lang.Object ref = namespaceName_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString and memoize the String form.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          namespaceName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNamespaceNameBytes() {
        java.lang.Object ref = namespaceName_;
        if (ref instanceof String) {
          // Encode the cached String and memoize the ByteString form.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          namespaceName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public Builder setNamespaceName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        namespaceName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public Builder clearNamespaceName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        // Reset to the default instance's value (the empty string).
        namespaceName_ = getDefaultInstance().getNamespaceName();
        onChanged();
        return this;
      }
      /**
       * <code>required string namespaceName = 1;</code>
       */
      public Builder setNamespaceNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        namespaceName_ = value;
        onChanged();
        return this;
      }
21514 
21515       // @@protoc_insertion_point(builder_scope:ListTableDescriptorsByNamespaceRequest)
21516     }
21517 
    // Eagerly creates the singleton default instance used by
    // getDefaultInstance() and as the merge no-op sentinel.
    static {
      defaultInstance = new ListTableDescriptorsByNamespaceRequest(true);
      defaultInstance.initFields();
    }
21522 
21523     // @@protoc_insertion_point(class_scope:ListTableDescriptorsByNamespaceRequest)
21524   }
21525 
  // Read-only view shared by ListTableDescriptorsByNamespaceResponse and its
  // Builder; exposes the repeated tableSchema field (field number 1).
  public interface ListTableDescriptorsByNamespaceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .TableSchema tableSchema = 1;
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>
        getTableSchemaList();
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index);
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    int getTableSchemaCount();
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
        getTableSchemaOrBuilderList();
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
        int index);
  }
21554   /**
21555    * Protobuf type {@code ListTableDescriptorsByNamespaceResponse}
21556    */
21557   public static final class ListTableDescriptorsByNamespaceResponse extends
21558       com.google.protobuf.GeneratedMessage
21559       implements ListTableDescriptorsByNamespaceResponseOrBuilder {
    // Use ListTableDescriptorsByNamespaceResponse.newBuilder() to construct.
    private ListTableDescriptorsByNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only by the static initializer to create the singleton
    // default instance without running field initialization.
    private ListTableDescriptorsByNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ListTableDescriptorsByNamespaceResponse defaultInstance;
    public static ListTableDescriptorsByNamespaceResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ListTableDescriptorsByNamespaceResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked via PARSER.parsePartialFrom().
    // Reads tags until end of input (tag 0); tag 10 is field 1 (tableSchema,
    // wire type 2 = length-delimited); anything else goes to unknownFields.
    // Note: the 'default' arm preceding 'case 10' is harmless — Java switch
    // dispatch does not depend on the textual order of cases.
    private ListTableDescriptorsByNamespaceResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Lazily allocate the list on the first element; bit 0 of the
              // mutable bitfield tracks that allocation happened.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>();
                mutable_bitField0_ |= 0x00000001;
              }
              tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated field and unknown fields even on error so the
        // unfinished message attached to the exception is safe to read.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, resolved from the file-level tables.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.Builder.class);
    }

    // Parser singleton; delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<ListTableDescriptorsByNamespaceResponse> PARSER =
        new com.google.protobuf.AbstractParser<ListTableDescriptorsByNamespaceResponse>() {
      public ListTableDescriptorsByNamespaceResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListTableDescriptorsByNamespaceResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ListTableDescriptorsByNamespaceResponse> getParserForType() {
      return PARSER;
    }
21654 
    // repeated .TableSchema tableSchema = 1;
    public static final int TABLESCHEMA_FIELD_NUMBER = 1;
    // Backing list; made unmodifiable by the parsing constructor, or set to
    // an empty list by initFields().
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_;
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() {
      return tableSchema_;
    }
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
        getTableSchemaOrBuilderList() {
      return tableSchema_;
    }
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    public int getTableSchemaCount() {
      return tableSchema_.size();
    }
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) {
      return tableSchema_.get(index);
    }
    /**
     * <code>repeated .TableSchema tableSchema = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
        int index) {
      return tableSchema_.get(index);
    }

    // Resets all fields to their defaults; called before wire parsing.
    private void initFields() {
      tableSchema_ = java.util.Collections.emptyList();
    }
    // Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A response is initialized iff every contained TableSchema is.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getTableSchemaCount(); i++) {
        if (!getTableSchema(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes each tableSchema element as field 1, then unknown fields.
    // getSerializedSize() is called first to populate memoized sizes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < tableSchema_.size(); i++) {
        output.writeMessage(1, tableSchema_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < tableSchema_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableSchema_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
21732 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over the tableSchema list and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) obj;

      boolean result = true;
      result = result && getTableSchemaList()
          .equals(other.getTableSchemaList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 is the "not yet computed" sentinel. Mixes in the
    // descriptor, the tableSchema list (when non-empty), and unknown fields.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableSchemaCount() > 0) {
        hash = (37 * hash) + TABLESCHEMA_FIELD_NUMBER;
        hash = (53 * hash) + getTableSchemaList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
21774 
    // Static parse entry points. All delegate to PARSER: byte-array/ByteString
    // overloads throw InvalidProtocolBufferException, stream overloads throw
    // IOException; the delimited variants read a varint length prefix first.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories; newBuilder(prototype) pre-populates from an existing
    // message, and toBuilder() is the instance-method equivalent.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
21834 
    // Framework hook: creates a builder wired to a parent for change
    // notifications (used by nested-builder support).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
21841     /**
21842      * Protobuf type {@code ListTableDescriptorsByNamespaceResponse}
21843      */
21844     public static final class Builder extends
21845         com.google.protobuf.GeneratedMessage.Builder<Builder>
21846        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder when the runtime requires
      // field builders (nested-builder mode); otherwise a no-op.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableSchemaFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
21877 
      // Resets the builder; which path runs depends on whether the repeated
      // field is in plain-list mode or field-builder mode.
      public Builder clear() {
        super.clear();
        if (tableSchemaBuilder_ == null) {
          tableSchema_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          tableSchemaBuilder_.clear();
        }
        return this;
      }

      // Deep copy via round-trip through a partially built message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableDescriptorsByNamespaceResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
      }

      // Strict build: throws if any contained TableSchema is uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
21909 
      // Builds without required-field checks. In plain-list mode the list is
      // sealed (made unmodifiable) and handed to the message, clearing the
      // ownership bit so later builder mutations copy-on-write; in
      // field-builder mode the nested builder assembles the list.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse(this);
        int from_bitField0_ = bitField0_;
        if (tableSchemaBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.tableSchema_ = tableSchema_;
        } else {
          result.tableSchema_ = tableSchemaBuilder_.build();
        }
        onBuilt();
        return result;
      }
21925 
      // Dispatches to the type-specific merge when 'other' is the same message
      // type; otherwise falls back to the reflective GeneratedMessage merge.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Appends 'other''s tableSchema elements. In plain-list mode an empty
      // local list simply aliases other's (immutable) list — safe because any
      // later mutation goes through ensureTableSchemaIsMutable(), which
      // copies. In field-builder mode an empty nested builder is discarded and
      // rebuilt from other's list; a non-empty one gets the messages appended.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance()) return this;
        if (tableSchemaBuilder_ == null) {
          if (!other.tableSchema_.isEmpty()) {
            if (tableSchema_.isEmpty()) {
              tableSchema_ = other.tableSchema_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureTableSchemaIsMutable();
              tableSchema_.addAll(other.tableSchema_);
            }
            onChanged();
          }
        } else {
          if (!other.tableSchema_.isEmpty()) {
            if (tableSchemaBuilder_.isEmpty()) {
              tableSchemaBuilder_.dispose();
              tableSchemaBuilder_ = null;
              tableSchema_ = other.tableSchema_;
              bitField0_ = (bitField0_ & ~0x00000001);
              tableSchemaBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getTableSchemaFieldBuilder() : null;
            } else {
              tableSchemaBuilder_.addAllMessages(other.tableSchema_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
21966 
      // The builder is initialized iff every contained TableSchema is.
      public final boolean isInitialized() {
        for (int i = 0; i < getTableSchemaCount(); i++) {
          if (!getTableSchema(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }

      // Parses one message from the stream and merges it into this builder.
      // On a parse error the partially parsed message (if any) is still merged
      // in the finally block before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated .TableSchema tableSchema = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: before the first mutation, replaces a possibly
      // shared/immutable list with a private ArrayList copy; bit 0 of
      // bitField0_ records that this builder owns the list.
      private void ensureTableSchemaIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>(tableSchema_);
          bitField0_ |= 0x00000001;
         }
      }

      // Non-null once the builder switches to field-builder mode (created by
      // getTableSchemaFieldBuilder(), defined further down in this class);
      // while null, the plain tableSchema_ list above is used instead.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;
22008 
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() {
        if (tableSchemaBuilder_ == null) {
          // Plain-list mode: expose a read-only view so callers cannot mutate
          // the builder's backing list.
          return java.util.Collections.unmodifiableList(tableSchema_);
        } else {
          return tableSchemaBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       */
      public int getTableSchemaCount() {
        if (tableSchemaBuilder_ == null) {
          return tableSchema_.size();
        } else {
          return tableSchemaBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) {
        if (tableSchemaBuilder_ == null) {
          return tableSchema_.get(index);
        } else {
          return tableSchemaBuilder_.getMessage(index);
        }
      }
22039       /**
22040        * <code>repeated .TableSchema tableSchema = 1;</code>
22041        */
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Replaces the element at {@code index}. In list-backed mode the list is
       * made mutable first and {@code onChanged()} notifies the parent builder;
       * in builder-backed mode the call is delegated. Rejects null values.
       */
      public Builder setTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableSchemaIsMutable();
          tableSchema_.set(index, value);
          onChanged();
        } else {
          tableSchemaBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Same as above but builds the message from {@code builderForValue}
       * first; no null check is needed since build() never returns null.
       */
      public Builder setTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.set(index, builderForValue.build());
          onChanged();
        } else {
          tableSchemaBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Appends {@code value} to the repeated field. Rejects null.
       */
      public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableSchemaIsMutable();
          tableSchema_.add(value);
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Inserts {@code value} at {@code index}, shifting later elements.
       */
      public Builder addTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableSchemaIsMutable();
          tableSchema_.add(index, value);
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Appends the message built from {@code builderForValue}.
       */
      public Builder addTableSchema(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.add(builderForValue.build());
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Inserts the message built from {@code builderForValue} at {@code index}.
       */
      public Builder addTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.add(index, builderForValue.build());
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
22131       /**
22132        * <code>repeated .TableSchema tableSchema = 1;</code>
22133        */
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Bulk-appends all of {@code values}; in list-backed mode this uses the
       * null-checking addAll helper inherited from GeneratedMessage.Builder.
       */
      public Builder addAllTableSchema(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> values) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          super.addAll(values, tableSchema_);
          onChanged();
        } else {
          tableSchemaBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Resets the field to the shared immutable empty list and clears the
       * "mutable copy made" bit so the next mutation re-copies.
       */
      public Builder clearTableSchema() {
        if (tableSchemaBuilder_ == null) {
          tableSchema_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          tableSchemaBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Removes the element at {@code index}, shifting later elements down.
       */
      public Builder removeTableSchema(int index) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.remove(index);
          onChanged();
        } else {
          tableSchemaBuilder_.remove(index);
        }
        return this;
      }
22171       /**
22172        * <code>repeated .TableSchema tableSchema = 1;</code>
22173        */
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Returns a live sub-builder for element {@code index}; forces the
       * switch to builder-backed mode (see getTableSchemaFieldBuilder below).
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder(
          int index) {
        return getTableSchemaFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Read-only view of element {@code index}; does NOT force builder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
          int index) {
        if (tableSchemaBuilder_ == null) {
          return tableSchema_.get(index);  } else {
          return tableSchemaBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Read-only list view of all elements; does NOT force builder mode.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
           getTableSchemaOrBuilderList() {
        if (tableSchemaBuilder_ != null) {
          return tableSchemaBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(tableSchema_);
        }
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Appends a new default-initialized element and returns its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() {
        return getTableSchemaFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance());
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Inserts a new default-initialized element at {@code index} and
       * returns its builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder(
          int index) {
        return getTableSchemaFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance());
      }
      /**
       * <code>repeated .TableSchema tableSchema = 1;</code>
       *
       * Live list of sub-builders; mutations through them are reflected in
       * the eventual built message.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder>
           getTableSchemaBuilderList() {
        return getTableSchemaFieldBuilder().getBuilderList();
      }
      /**
       * Creates the RepeatedFieldBuilder on first use, seeding it with the
       * current list contents, and permanently switches this Builder to
       * builder-backed mode (tableSchema_ is nulled out afterwards; every
       * accessor above checks for that).
       */
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
          getTableSchemaFieldBuilder() {
        if (tableSchemaBuilder_ == null) {
          tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
                  tableSchema_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          tableSchema_ = null;
        }
        return tableSchemaBuilder_;
      }
22235 
22236       // @@protoc_insertion_point(builder_scope:ListTableDescriptorsByNamespaceResponse)
22237     }
22238 
    // Eagerly builds the singleton default instance using the lightweight
    // no-init constructor, then fills in field defaults via initFields().
    static {
      defaultInstance = new ListTableDescriptorsByNamespaceResponse(true);
      defaultInstance.initFields();
    }
22243 
22244     // @@protoc_insertion_point(class_scope:ListTableDescriptorsByNamespaceResponse)
22245   }
22246 
  /**
   * Read-only accessor interface implemented by both
   * {@code ListTableNamesByNamespaceRequest} and its {@code Builder},
   * exposing the single required {@code namespaceName} field.
   */
  public interface ListTableNamesByNamespaceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string namespaceName = 1;
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * True when the field has been explicitly set.
     */
    boolean hasNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     */
    java.lang.String getNamespaceName();
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * UTF-8 bytes form of the same value.
     */
    com.google.protobuf.ByteString
        getNamespaceNameBytes();
  }
22265   /**
22266    * Protobuf type {@code ListTableNamesByNamespaceRequest}
22267    */
22268   public static final class ListTableNamesByNamespaceRequest extends
22269       com.google.protobuf.GeneratedMessage
22270       implements ListTableNamesByNamespaceRequestOrBuilder {
22271     // Use ListTableNamesByNamespaceRequest.newBuilder() to construct.
    // Use ListTableNamesByNamespaceRequest.newBuilder() to construct.
    private ListTableNamesByNamespaceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used only for the static default instance;
    // skips builder machinery and attaches the shared empty unknown-field set.
    private ListTableNamesByNamespaceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ListTableNamesByNamespaceRequest defaultInstance;
    /** Shared immutable default instance (all fields at proto defaults). */
    public static ListTableNamesByNamespaceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ListTableNamesByNamespaceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that were present on the wire but unknown to this schema
    // version; preserved so reserialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor used by PARSER. Reads tag/value pairs
     * until tag 0 (end of stream/message). Tag 10 is field 1
     * (namespaceName, wire type 2 = length-delimited); any other tag is
     * routed to parseUnknownField. Note the `default:` arm appears before
     * `case 10:` — switch-arm order is irrelevant in Java since every arm
     * breaks. The unknown-field set is committed in `finally` so a partially
     * parsed message (attached via setUnfinishedMessage) keeps what was read.
     */
    private ListTableNamesByNamespaceRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              namespaceName_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers see a single exception type.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for this message type, from the file's descriptor pool. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessors via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.Builder.class);
    }

    // Stateless parser delegating to the parsing constructor above.
    // (Generated as a mutable public field by protobuf 2.x; do not reassign.)
    public static com.google.protobuf.Parser<ListTableNamesByNamespaceRequest> PARSER =
        new com.google.protobuf.AbstractParser<ListTableNamesByNamespaceRequest>() {
      public ListTableNamesByNamespaceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListTableNamesByNamespaceRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ListTableNamesByNamespaceRequest> getParserForType() {
      return PARSER;
    }
22359 
    // Presence bits for optional/required fields; bit 0x00000001 tracks
    // namespaceName.
    private int bitField0_;
    // required string namespaceName = 1;
    public static final int NAMESPACENAME_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString; lazily converted and cached in
    // whichever direction is requested first (see the two getters below).
    private java.lang.Object namespaceName_;
    /**
     * <code>required string namespaceName = 1;</code>
     */
    public boolean hasNamespaceName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * Decodes the cached ByteString to a String on first call and caches the
     * String back — but only when the bytes are valid UTF-8, so invalid data
     * is re-decoded (with replacement chars) each call rather than cached.
     * The unsynchronized write is a benign race: all threads compute the
     * same value.
     */
    public java.lang.String getNamespaceName() {
      java.lang.Object ref = namespaceName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          namespaceName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string namespaceName = 1;</code>
     *
     * Mirror of getNamespaceName(): encodes a cached String to UTF-8 bytes
     * and caches the ByteString back (same benign race).
     */
    public com.google.protobuf.ByteString
        getNamespaceNameBytes() {
      java.lang.Object ref = namespaceName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        namespaceName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
22403 
    /** Sets every field to its proto default (empty string here). */
    private void initFields() {
      namespaceName_ = "";
    }
    // -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * A message is initialized when all required fields are set; the result
     * is memoized since the message is immutable.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasNamespaceName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
22419 
    /**
     * Serializes set fields (tag 1, length-delimited) followed by any
     * preserved unknown fields. getSerializedSize() is invoked first for its
     * side effect of populating memoized sizes used during writing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getNamespaceNameBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // -1 = not yet computed; safe to cache because the message is immutable.
    private int memoizedSerializedSize = -1;
    /** Total wire size in bytes, memoized after the first computation. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getNamespaceNameBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
22443 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: defers to GeneratedMessage's proxy form so
    // instances serialize via the protobuf wire format.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
22450 
22451     @java.lang.Override
equals(final java.lang.Object obj)22452     public boolean equals(final java.lang.Object obj) {
22453       if (obj == this) {
22454        return true;
22455       }
22456       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)) {
22457         return super.equals(obj);
22458       }
22459       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) obj;
22460 
22461       boolean result = true;
22462       result = result && (hasNamespaceName() == other.hasNamespaceName());
22463       if (hasNamespaceName()) {
22464         result = result && getNamespaceName()
22465             .equals(other.getNamespaceName());
22466       }
22467       result = result &&
22468           getUnknownFields().equals(other.getUnknownFields());
22469       return result;
22470     }
22471 
22472     private int memoizedHashCode = 0;
22473     @java.lang.Override
hashCode()22474     public int hashCode() {
22475       if (memoizedHashCode != 0) {
22476         return memoizedHashCode;
22477       }
22478       int hash = 41;
22479       hash = (19 * hash) + getDescriptorForType().hashCode();
22480       if (hasNamespaceName()) {
22481         hash = (37 * hash) + NAMESPACENAME_FIELD_NUMBER;
22482         hash = (53 * hash) + getNamespaceName().hashCode();
22483       }
22484       hash = (29 * hash) + getUnknownFields().hashCode();
22485       memoizedHashCode = hash;
22486       return hash;
22487     }
22488 
    // ------------------------------------------------------------------
    // Static parse entry points — thin convenience wrappers that all
    // delegate to PARSER. The *Delimited* variants expect a varint length
    // prefix before the message bytes.
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
22541 
    /** Fresh builder with all fields at defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Builder pre-populated by merging from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder wired to a parent so nested changes propagate.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
22555     /**
22556      * Protobuf type {@code ListTableNamesByNamespaceRequest}
22557      */
22558     public static final class Builder extends
22559         com.google.protobuf.GeneratedMessage.Builder<Builder>
22560        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequestOrBuilder {
      /** Same descriptor as the message type this builder produces. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Variant used when this builder is nested under a parent builder.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields needing eager
      // field-builder creation; the hook is emitted for uniformity.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
22590 
      /** Resets all fields and presence bits to defaults. */
      public Builder clear() {
        super.clear();
        namespaceName_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      /** Independent copy via build-partial-and-merge round trip. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
      }
22610 
      /**
       * Builds the message, throwing UninitializedMessageException if the
       * required namespaceName field is missing.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the required-field check, copying the builder's
       * presence bits into the message's bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.namespaceName_ = namespaceName_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
22631 
      /** Type-dispatching merge; falls back to reflective merge for other types. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges set fields from {@code other} into this builder (last-writer
       * wins for scalar fields); merging the default instance is a no-op.
       * Note: copies the raw String-or-ByteString object, not a decoded copy.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance()) return this;
        if (other.hasNamespaceName()) {
          bitField0_ |= 0x00000001;
          namespaceName_ = other.namespaceName_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** Unmemoized required-field check (the builder is mutable). */
      public final boolean isInitialized() {
        if (!hasNamespaceName()) {

          return false;
        }
        return true;
      }
22659 
      /**
       * Parses a message from {@code input} and merges it into this builder.
       * If parsing fails part-way through, the partially read message is still
       * merged (via the finally block) before the exception is rethrown, so
       * fields read before the failure are not lost.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover whatever was successfully parsed before the error.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder's singular fields (bit 0 = namespaceName).
      private int bitField0_;

      // required string namespaceName = 1;
      // Holds either a java.lang.String or a ByteString; converted lazily on access.
      private java.lang.Object namespaceName_ = "";
22681       /**
22682        * <code>required string namespaceName = 1;</code>
22683        */
hasNamespaceName()22684       public boolean hasNamespaceName() {
22685         return ((bitField0_ & 0x00000001) == 0x00000001);
22686       }
      /**
       * <code>required string namespaceName = 1;</code>
       *
       * Returns the field as a String, converting from the wire ByteString
       * on first access and caching the result back into the field.
       */
      public java.lang.String getNamespaceName() {
        java.lang.Object ref = namespaceName_;
        if (!(ref instanceof java.lang.String)) {
          // Field still holds the raw ByteString from parsing; decode once.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          namespaceName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string namespaceName = 1;</code>
       *
       * Returns the field as UTF-8 bytes, converting from String on demand
       * and caching the ByteString back into the field.
       */
      public com.google.protobuf.ByteString
          getNamespaceNameBytes() {
        java.lang.Object ref = namespaceName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          namespaceName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
22717       /**
22718        * <code>required string namespaceName = 1;</code>
22719        */
setNamespaceName( java.lang.String value)22720       public Builder setNamespaceName(
22721           java.lang.String value) {
22722         if (value == null) {
22723     throw new NullPointerException();
22724   }
22725   bitField0_ |= 0x00000001;
22726         namespaceName_ = value;
22727         onChanged();
22728         return this;
22729       }
22730       /**
22731        * <code>required string namespaceName = 1;</code>
22732        */
clearNamespaceName()22733       public Builder clearNamespaceName() {
22734         bitField0_ = (bitField0_ & ~0x00000001);
22735         namespaceName_ = getDefaultInstance().getNamespaceName();
22736         onChanged();
22737         return this;
22738       }
22739       /**
22740        * <code>required string namespaceName = 1;</code>
22741        */
setNamespaceNameBytes( com.google.protobuf.ByteString value)22742       public Builder setNamespaceNameBytes(
22743           com.google.protobuf.ByteString value) {
22744         if (value == null) {
22745     throw new NullPointerException();
22746   }
22747   bitField0_ |= 0x00000001;
22748         namespaceName_ = value;
22749         onChanged();
22750         return this;
22751       }
22752 
22753       // @@protoc_insertion_point(builder_scope:ListTableNamesByNamespaceRequest)
22754     }
22755 
    // Eagerly build the shared immutable default (empty) instance.
    static {
      defaultInstance = new ListTableNamesByNamespaceRequest(true);
      defaultInstance.initFields();
    }
22760 
22761     // @@protoc_insertion_point(class_scope:ListTableNamesByNamespaceRequest)
22762   }
22763 
  /**
   * Read accessors shared by {@code ListTableNamesByNamespaceResponse} and
   * its Builder; exposes the repeated {@code tableName} field.
   */
  public interface ListTableNamesByNamespaceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .TableName tableName = 1;
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>
        getTableNameList();
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index);
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    int getTableNameCount();
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
        getTableNameOrBuilderList();
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
        int index);
  }
22792   /**
22793    * Protobuf type {@code ListTableNamesByNamespaceResponse}
22794    */
22795   public static final class ListTableNamesByNamespaceResponse extends
22796       com.google.protobuf.GeneratedMessage
22797       implements ListTableNamesByNamespaceResponseOrBuilder {
    // Use ListTableNamesByNamespaceResponse.newBuilder() to construct.
    private ListTableNamesByNamespaceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor used only for the shared default instance.
    private ListTableNamesByNamespaceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default (empty) instance for this message type.
    private static final ListTableNamesByNamespaceResponse defaultInstance;
    public static ListTableNamesByNamespaceResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ListTableNamesByNamespaceResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from
     * {@code input} until end of stream, collecting repeated tableName
     * messages and preserving unrecognized fields.
     */
    private ListTableNamesByNamespaceResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Tracks whether the mutable tableName_ list has been allocated yet.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // Unknown tag: stash it, or stop if it cannot be parsed.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1, wire type 2: one TableName message.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                // Lazily allocate the list on the first occurrence.
                tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
                mutable_bitField0_ |= 0x00000001;
              }
              tableName_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach this partially built message so callers can recover it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze collected state even when an exception is propagating.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          tableName_ = java.util.Collections.unmodifiableList(tableName_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, generated from Master.proto.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_descriptor;
    }

    // Maps descriptor fields onto this class's accessors via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.Builder.class);
    }

    // Stateless parser that delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<ListTableNamesByNamespaceResponse> PARSER =
        new com.google.protobuf.AbstractParser<ListTableNamesByNamespaceResponse>() {
      public ListTableNamesByNamespaceResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListTableNamesByNamespaceResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ListTableNamesByNamespaceResponse> getParserForType() {
      return PARSER;
    }
22892 
    // repeated .TableName tableName = 1;
    public static final int TABLENAME_FIELD_NUMBER = 1;
    // Immutable after construction (unmodifiable list or Collections.emptyList()).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_;
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() {
      return tableName_;
    }
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
        getTableNameOrBuilderList() {
      return tableName_;
    }
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    public int getTableNameCount() {
      return tableName_.size();
    }
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
      return tableName_.get(index);
    }
    /**
     * <code>repeated .TableName tableName = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
        int index) {
      return tableName_.get(index);
    }

    // Sets every field to its default; called by the parsing constructor.
    private void initFields() {
      tableName_ = java.util.Collections.emptyList();
    }
    // Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /**
     * A response is initialized when every nested TableName is itself
     * initialized; the answer is cached after the first computation.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getTableNameCount(); i++) {
        if (!getTableName(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
22946 
    /**
     * Serializes this message to {@code output}: each tableName element as
     * field 1, followed by any unknown fields.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensure the size is computed (and cached) before writing.
      getSerializedSize();
      for (int i = 0; i < tableName_.size(); i++) {
        output.writeMessage(1, tableName_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed (message is immutable).
    private int memoizedSerializedSize = -1;
    /**
     * Returns the exact number of bytes {@link #writeTo} will emit,
     * computing and caching it on first call.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < tableName_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
22970 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
22977 
22978     @java.lang.Override
equals(final java.lang.Object obj)22979     public boolean equals(final java.lang.Object obj) {
22980       if (obj == this) {
22981        return true;
22982       }
22983       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse)) {
22984         return super.equals(obj);
22985       }
22986       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) obj;
22987 
22988       boolean result = true;
22989       result = result && getTableNameList()
22990           .equals(other.getTableNameList());
22991       result = result &&
22992           getUnknownFields().equals(other.getUnknownFields());
22993       return result;
22994     }
22995 
    // Cached hash; 0 means "not yet computed" (message is immutable).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with {@link #equals}: folds in the descriptor, the
     * tableName list (when non-empty) and the unknown fields.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableNameCount() > 0) {
        hash = (37 * hash) + TABLENAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableNameList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
23012 
    // Static parse entry points; all delegate to PARSER and differ only in
    // input source (ByteString, byte[], InputStream, CodedInputStream) and
    // whether an extension registry and/or length-delimiting is used.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
23065 
    // Creates an empty builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a builder pre-populated with the fields of the given prototype.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builds a Builder attached to a parent for change propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
23079     /**
23080      * Protobuf type {@code ListTableNamesByNamespaceResponse}
23081      */
23082     public static final class Builder extends
23083         com.google.protobuf.GeneratedMessage.Builder<Builder>
23084        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponseOrBuilder {
      // Descriptor and reflection plumbing; mirrors the enclosing message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates nested field builders when the runtime requires them.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
23115 
      /**
       * Resets this builder to the empty state, clearing the tableName list
       * either directly or through its field builder.
       */
      public Builder clear() {
        super.clear();
        if (tableNameBuilder_ == null) {
          tableName_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          tableNameBuilder_.clear();
        }
        return this;
      }

      // Deep copy: a fresh builder seeded with this builder's current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
23130 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListTableNamesByNamespaceResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
      }

      /**
       * Builds the message, failing fast if any required sub-message field is
       * missing (see {@link #isInitialized}).
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
23147 
      /**
       * Builds the message without the initialization check.  The builder's
       * mutable tableName list is frozen (made unmodifiable) and handed to the
       * result; the cleared bit means the builder must copy-on-write before
       * mutating the list again.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse(this);
        int from_bitField0_ = bitField0_;
        if (tableNameBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            tableName_ = java.util.Collections.unmodifiableList(tableName_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        onBuilt();
        return result;
      }
23163 
      /**
       * Type-dispatching merge: uses the strongly typed overload when
       * {@code other} is the same message type, otherwise falls back to the
       * reflective merge in the superclass.
       */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
23172 
      /**
       * Merges {@code other} into this builder.  The repeated tableName field
       * is appended, never replaced; the code path depends on whether this
       * builder has switched to a RepeatedFieldBuilder yet.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance()) return this;
        if (tableNameBuilder_ == null) {
          if (!other.tableName_.isEmpty()) {
            if (tableName_.isEmpty()) {
              // Adopt other's immutable list directly; copy-on-write on next mutation.
              tableName_ = other.tableName_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureTableNameIsMutable();
              tableName_.addAll(other.tableName_);
            }
            onChanged();
          }
        } else {
          if (!other.tableName_.isEmpty()) {
            if (tableNameBuilder_.isEmpty()) {
              // Cheaper to adopt other's list than to add through an empty
              // builder; re-create the builder only if the runtime demands it.
              tableNameBuilder_.dispose();
              tableNameBuilder_ = null;
              tableName_ = other.tableName_;
              bitField0_ = (bitField0_ & ~0x00000001);
              tableNameBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getTableNameFieldBuilder() : null;
            } else {
              tableNameBuilder_.addAllMessages(other.tableName_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
23204 
isInitialized()23205       public final boolean isInitialized() {
23206         for (int i = 0; i < getTableNameCount(); i++) {
23207           if (!getTableName(i).isInitialized()) {
23208 
23209             return false;
23210           }
23211         }
23212         return true;
23213       }
23214 
      /**
       * Parses a message from {@code input} and merges it into this builder;
       * on a parse error the partially read message is still merged (via the
       * finally block) before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover whatever was successfully parsed before the failure.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 tracks whether tableName_ is a private mutable copy.
      private int bitField0_;

      // repeated .TableName tableName = 1;
      // Starts as the shared immutable empty list; copied on first mutation.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableName_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replaces tableName_ with a private ArrayList copy if
      // this builder does not yet own a mutable list.
      private void ensureTableNameIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableName_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableName_);
          bitField0_ |= 0x00000001;
         }
      }

      // Lazily created; once non-null it supersedes tableName_ as the source of truth.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
23246 
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * Returns a read-only view of the current elements, from whichever
       * store (plain list or field builder) is active.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNameList() {
        if (tableNameBuilder_ == null) {
          return java.util.Collections.unmodifiableList(tableName_);
        } else {
          return tableNameBuilder_.getMessageList();
        }
      }
23257       /**
23258        * <code>repeated .TableName tableName = 1;</code>
23259        */
getTableNameCount()23260       public int getTableNameCount() {
23261         if (tableNameBuilder_ == null) {
23262           return tableName_.size();
23263         } else {
23264           return tableNameBuilder_.getCount();
23265         }
23266       }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * Returns the element at {@code index} from whichever store is active.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(int index) {
        if (tableNameBuilder_ == null) {
          return tableName_.get(index);
        } else {
          return tableNameBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * Replaces the element at {@code index}; rejects null values.
       */
      public Builder setTableName(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableNameIsMutable();
          tableName_.set(index, value);
          onChanged();
        } else {
          tableNameBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * Replaces the element at {@code index} with the built form of the
       * given sub-builder.
       */
      public Builder setTableName(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          ensureTableNameIsMutable();
          tableName_.set(index, builderForValue.build());
          onChanged();
        } else {
          tableNameBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Appends {@code value}; throws {@code NullPointerException} when
       * {@code value} is null.
       */
      public Builder addTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableNameIsMutable();
          tableName_.add(value);
          onChanged();
        } else {
          tableNameBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Inserts {@code value} at {@code index}, shifting later elements;
       * throws {@code NullPointerException} when {@code value} is null.
       */
      public Builder addTableName(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableNameIsMutable();
          tableName_.add(index, value);
          onChanged();
        } else {
          tableNameBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Appends the message produced by {@code builderForValue.build()}.
       */
      public Builder addTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          ensureTableNameIsMutable();
          tableName_.add(builderForValue.build());
          onChanged();
        } else {
          tableNameBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Inserts the message produced by {@code builderForValue.build()} at
       * {@code index}.
       */
      public Builder addTableName(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          ensureTableNameIsMutable();
          tableName_.add(index, builderForValue.build());
          onChanged();
        } else {
          tableNameBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Appends every element of {@code values} in iteration order
       * (delegates to {@code GeneratedMessage.Builder.addAll} in list mode).
       */
      public Builder addAllTableName(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) {
        if (tableNameBuilder_ == null) {
          ensureTableNameIsMutable();
          super.addAll(values, tableName_);
          onChanged();
        } else {
          tableNameBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Removes all elements and clears the has-bit (bit 0 of
       * {@code bitField0_}) in list mode.
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Removes the element at {@code index}, shifting later elements left.
       */
      public Builder removeTableName(int index) {
        if (tableNameBuilder_ == null) {
          ensureTableNameIsMutable();
          tableName_.remove(index);
          onChanged();
        } else {
          tableNameBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Returns a mutable sub-builder for the element at {@code index}.
       * Forces the field into RepeatedFieldBuilder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder(
          int index) {
        return getTableNameFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Read-only view of the element at {@code index}; does not switch
       * representations.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(
          int index) {
        if (tableNameBuilder_ == null) {
          return tableName_.get(index);  } else {
          return tableNameBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Unmodifiable view over the whole repeated field; in list mode the
       * backing list is wrapped, not copied.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
           getTableNameOrBuilderList() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(tableName_);
        }
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Appends a new default-valued element and returns its sub-builder.
       * Forces RepeatedFieldBuilder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder() {
        return getTableNameFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>Inserts a new default-valued element at {@code index} and returns
       * its sub-builder.  Forces RepeatedFieldBuilder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNameBuilder(
          int index) {
        return getTableNameFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
      }
      /**
       * <code>repeated .TableName tableName = 1;</code>
       *
       * <p>List of per-element sub-builders; forces RepeatedFieldBuilder mode.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder>
           getTableNameBuilderList() {
        return getTableNameFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        // Lazily migrates the field from the plain-list representation to a
        // RepeatedFieldBuilder.  The has-bit tells the builder whether the
        // current list is owned (mutable) or shared; after migration the
        // list reference is dropped so the builder is the single source of
        // truth.
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }
23473 
23474       // @@protoc_insertion_point(builder_scope:ListTableNamesByNamespaceResponse)
23475     }
23476 
    // Eagerly builds the shared immutable default instance returned by
    // getDefaultInstance().
    static {
      defaultInstance = new ListTableNamesByNamespaceResponse(true);
      defaultInstance.initFields();
    }
23481 
23482     // @@protoc_insertion_point(class_scope:ListTableNamesByNamespaceResponse)
23483   }
23484 
  // Read-only accessor interface for ShutdownRequest.  Empty because the
  // message declares no fields.
  public interface ShutdownRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code ShutdownRequest}
   *
   * <p>Field-less message: its only state is the unknown-field set, which is
   * preserved across parse/serialize round trips.  Generated by protoc from
   * Master.proto -- regenerate rather than hand-editing the logic.
   */
  public static final class ShutdownRequest extends
      com.google.protobuf.GeneratedMessage
      implements ShutdownRequestOrBuilder {
    // Use ShutdownRequest.newBuilder() to construct.
    private ShutdownRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer to build the shared default
    // instance; no parsing is involved.
    private ShutdownRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ShutdownRequest defaultInstance;
    public static ShutdownRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ShutdownRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Stream-parsing constructor used by {@link #PARSER}.  Since the message
     * declares no fields, every tag except 0 (end of input) is routed to the
     * unknown-field set.
     */
    private ShutdownRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Record whatever was read, even when parsing failed part-way.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.Builder.class);
    }

    // Public and non-final exactly as emitted by this protoc version; part of
    // the generated API surface, so left as-is.
    public static com.google.protobuf.Parser<ShutdownRequest> PARSER =
        new com.google.protobuf.AbstractParser<ShutdownRequest>() {
      public ShutdownRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ShutdownRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ShutdownRequest> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // -1 = not yet computed; with no required fields the answer is always
    // true once memoized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      // Only the unknown fields contribute to the wire size.
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest) obj;

      // No declared fields: equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // All parseFrom/parseDelimitedFrom overloads delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ShutdownRequest}
     *
     * <p>Builder for the field-less ShutdownRequest; only unknown fields can
     * be merged.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: there are no message fields whose nested builders would
      // need eager creation.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance()) return this;
        // Only unknown fields exist to merge.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ShutdownRequest)
    }

    static {
      defaultInstance = new ShutdownRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ShutdownRequest)
  }
23822 
  // Read-only accessor interface for ShutdownResponse.  Empty because the
  // message declares no fields.
  public interface ShutdownResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
23826   /**
23827    * Protobuf type {@code ShutdownResponse}
23828    */
23829   public static final class ShutdownResponse extends
23830       com.google.protobuf.GeneratedMessage
23831       implements ShutdownResponseOrBuilder {
    // Use ShutdownResponse.newBuilder() to construct.
    // Captures the builder's unknown fields into the immutable message.
    private ShutdownResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer to build the shared default
    // instance; no parsing is involved.
    private ShutdownResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
23838 
    // Shared immutable default instance, created in the class static block.
    private static final ShutdownResponse defaultInstance;
    public static ShutdownResponse getDefaultInstance() {
      return defaultInstance;
    }
23843 
    // Instance-level accessor mandated by the Message contract.
    public ShutdownResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
23847 
    // Unknown fields are the only state this field-less message carries.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Stream-parsing constructor used by {@link #PARSER}.  The message has no
     * declared fields, so every tag except 0 (end of input) is routed to the
     * unknown-field set.
     */
    private ShutdownResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Record whatever was read, even when parsing failed part-way.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for ShutdownResponse, resolved from the file-level tables.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_descriptor;
    }
23892 
    // Reflection table binding this descriptor to the generated classes.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.Builder.class);
    }
23899 
    // Public and non-final exactly as emitted by this protoc version; part of
    // the generated API surface, so left as-is.
    public static com.google.protobuf.Parser<ShutdownResponse> PARSER =
        new com.google.protobuf.AbstractParser<ShutdownResponse>() {
      public ShutdownResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ShutdownResponse(input, extensionRegistry);
      }
    };
23909 
    @java.lang.Override
    public com.google.protobuf.Parser<ShutdownResponse> getParserForType() {
      return PARSER;
    }
23914 
    // Nothing to initialize: the message declares no fields.
    private void initFields() {
    }
    // -1 = not yet computed; with no required fields the answer is always
    // true once memoized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
23925 
    // Serializes the message; only the unknown-field set produces bytes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
23931 
    // -1 = size not yet computed; memoized thereafter.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Only the unknown fields contribute to the wire size.
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
23942 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
23949 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) obj;

      // No declared fields: equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
23965 
    // Lazily computed, cached hash; 0 doubles as "not yet computed".
    private int memoizedHashCode = 0;
    /** Hash derived from the descriptor plus the unknown-field set (consistent with equals). */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
23978 
parseFrom( com.google.protobuf.ByteString data)23979     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(
23980         com.google.protobuf.ByteString data)
23981         throws com.google.protobuf.InvalidProtocolBufferException {
23982       return PARSER.parseFrom(data);
23983     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23984     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(
23985         com.google.protobuf.ByteString data,
23986         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23987         throws com.google.protobuf.InvalidProtocolBufferException {
23988       return PARSER.parseFrom(data, extensionRegistry);
23989     }
parseFrom(byte[] data)23990     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(byte[] data)
23991         throws com.google.protobuf.InvalidProtocolBufferException {
23992       return PARSER.parseFrom(data);
23993     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23994     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(
23995         byte[] data,
23996         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23997         throws com.google.protobuf.InvalidProtocolBufferException {
23998       return PARSER.parseFrom(data, extensionRegistry);
23999     }
parseFrom(java.io.InputStream input)24000     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(java.io.InputStream input)
24001         throws java.io.IOException {
24002       return PARSER.parseFrom(input);
24003     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24004     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(
24005         java.io.InputStream input,
24006         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24007         throws java.io.IOException {
24008       return PARSER.parseFrom(input, extensionRegistry);
24009     }
parseDelimitedFrom(java.io.InputStream input)24010     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseDelimitedFrom(java.io.InputStream input)
24011         throws java.io.IOException {
24012       return PARSER.parseDelimitedFrom(input);
24013     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24014     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseDelimitedFrom(
24015         java.io.InputStream input,
24016         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24017         throws java.io.IOException {
24018       return PARSER.parseDelimitedFrom(input, extensionRegistry);
24019     }
parseFrom( com.google.protobuf.CodedInputStream input)24020     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(
24021         com.google.protobuf.CodedInputStream input)
24022         throws java.io.IOException {
24023       return PARSER.parseFrom(input);
24024     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24025     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parseFrom(
24026         com.google.protobuf.CodedInputStream input,
24027         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24028         throws java.io.IOException {
24029       return PARSER.parseFrom(input, extensionRegistry);
24030     }
24031 
newBuilder()24032     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()24033     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse prototype)24034     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse prototype) {
24035       return newBuilder().mergeFrom(prototype);
24036     }
toBuilder()24037     public Builder toBuilder() { return newBuilder(this); }
24038 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally by GeneratedMessage for nested builders.
      Builder builder = new Builder(parent);
      return builder;
    }
24045     /**
24046      * Protobuf type {@code ShutdownResponse}
24047      */
24048     public static final class Builder extends
24049         com.google.protobuf.GeneratedMessage.Builder<Builder>
24050        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponseOrBuilder {
24051       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()24052           getDescriptor() {
24053         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_descriptor;
24054       }
24055 
24056       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()24057           internalGetFieldAccessorTable() {
24058         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_fieldAccessorTable
24059             .ensureFieldAccessorsInitialized(
24060                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.Builder.class);
24061       }
24062 
24063       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.newBuilder()
Builder()24064       private Builder() {
24065         maybeForceBuilderInitialization();
24066       }
24067 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)24068       private Builder(
24069           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24070         super(parent);
24071         maybeForceBuilderInitialization();
24072       }
maybeForceBuilderInitialization()24073       private void maybeForceBuilderInitialization() {
24074         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
24075         }
24076       }
create()24077       private static Builder create() {
24078         return new Builder();
24079       }
24080 
clear()24081       public Builder clear() {
24082         super.clear();
24083         return this;
24084       }
24085 
clone()24086       public Builder clone() {
24087         return create().mergeFrom(buildPartial());
24088       }
24089 
24090       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()24091           getDescriptorForType() {
24092         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ShutdownResponse_descriptor;
24093       }
24094 
getDefaultInstanceForType()24095       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse getDefaultInstanceForType() {
24096         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance();
24097       }
24098 
build()24099       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse build() {
24100         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse result = buildPartial();
24101         if (!result.isInitialized()) {
24102           throw newUninitializedMessageException(result);
24103         }
24104         return result;
24105       }
24106 
buildPartial()24107       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse buildPartial() {
24108         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse(this);
24109         onBuilt();
24110         return result;
24111       }
24112 
mergeFrom(com.google.protobuf.Message other)24113       public Builder mergeFrom(com.google.protobuf.Message other) {
24114         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) {
24115           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse)other);
24116         } else {
24117           super.mergeFrom(other);
24118           return this;
24119         }
24120       }
24121 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse other)24122       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse other) {
24123         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance()) return this;
24124         this.mergeUnknownFields(other.getUnknownFields());
24125         return this;
24126       }
24127 
isInitialized()24128       public final boolean isInitialized() {
24129         return true;
24130       }
24131 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24132       public Builder mergeFrom(
24133           com.google.protobuf.CodedInputStream input,
24134           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24135           throws java.io.IOException {
24136         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse parsedMessage = null;
24137         try {
24138           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
24139         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24140           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) e.getUnfinishedMessage();
24141           throw e;
24142         } finally {
24143           if (parsedMessage != null) {
24144             mergeFrom(parsedMessage);
24145           }
24146         }
24147         return this;
24148       }
24149 
24150       // @@protoc_insertion_point(builder_scope:ShutdownResponse)
24151     }
24152 
    static {
      // Eagerly create the singleton default instance for this message type.
      defaultInstance = new ShutdownResponse(true);
      defaultInstance.initFields();
    }
24157 
24158     // @@protoc_insertion_point(class_scope:ShutdownResponse)
24159   }
24160 
  // Marker interface: StopMasterRequest declares no fields of its own.
  public interface StopMasterRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
24164   /**
24165    * Protobuf type {@code StopMasterRequest}
24166    */
24167   public static final class StopMasterRequest extends
24168       com.google.protobuf.GeneratedMessage
24169       implements StopMasterRequestOrBuilder {
24170     // Use StopMasterRequest.newBuilder() to construct.
StopMasterRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)24171     private StopMasterRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
24172       super(builder);
24173       this.unknownFields = builder.getUnknownFields();
24174     }
StopMasterRequest(boolean noInit)24175     private StopMasterRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
24176 
24177     private static final StopMasterRequest defaultInstance;
getDefaultInstance()24178     public static StopMasterRequest getDefaultInstance() {
24179       return defaultInstance;
24180     }
24181 
getDefaultInstanceForType()24182     public StopMasterRequest getDefaultInstanceForType() {
24183       return defaultInstance;
24184     }
24185 
24186     private final com.google.protobuf.UnknownFieldSet unknownFields;
24187     @java.lang.Override
24188     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()24189         getUnknownFields() {
24190       return this.unknownFields;
24191     }
StopMasterRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24192     private StopMasterRequest(
24193         com.google.protobuf.CodedInputStream input,
24194         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24195         throws com.google.protobuf.InvalidProtocolBufferException {
24196       initFields();
24197       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
24198           com.google.protobuf.UnknownFieldSet.newBuilder();
24199       try {
24200         boolean done = false;
24201         while (!done) {
24202           int tag = input.readTag();
24203           switch (tag) {
24204             case 0:
24205               done = true;
24206               break;
24207             default: {
24208               if (!parseUnknownField(input, unknownFields,
24209                                      extensionRegistry, tag)) {
24210                 done = true;
24211               }
24212               break;
24213             }
24214           }
24215         }
24216       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24217         throw e.setUnfinishedMessage(this);
24218       } catch (java.io.IOException e) {
24219         throw new com.google.protobuf.InvalidProtocolBufferException(
24220             e.getMessage()).setUnfinishedMessage(this);
24221       } finally {
24222         this.unknownFields = unknownFields.build();
24223         makeExtensionsImmutable();
24224       }
24225     }
24226     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()24227         getDescriptor() {
24228       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_descriptor;
24229     }
24230 
24231     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()24232         internalGetFieldAccessorTable() {
24233       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_fieldAccessorTable
24234           .ensureFieldAccessorsInitialized(
24235               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.Builder.class);
24236     }
24237 
24238     public static com.google.protobuf.Parser<StopMasterRequest> PARSER =
24239         new com.google.protobuf.AbstractParser<StopMasterRequest>() {
24240       public StopMasterRequest parsePartialFrom(
24241           com.google.protobuf.CodedInputStream input,
24242           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24243           throws com.google.protobuf.InvalidProtocolBufferException {
24244         return new StopMasterRequest(input, extensionRegistry);
24245       }
24246     };
24247 
24248     @java.lang.Override
getParserForType()24249     public com.google.protobuf.Parser<StopMasterRequest> getParserForType() {
24250       return PARSER;
24251     }
24252 
initFields()24253     private void initFields() {
24254     }
24255     private byte memoizedIsInitialized = -1;
isInitialized()24256     public final boolean isInitialized() {
24257       byte isInitialized = memoizedIsInitialized;
24258       if (isInitialized != -1) return isInitialized == 1;
24259 
24260       memoizedIsInitialized = 1;
24261       return true;
24262     }
24263 
writeTo(com.google.protobuf.CodedOutputStream output)24264     public void writeTo(com.google.protobuf.CodedOutputStream output)
24265                         throws java.io.IOException {
24266       getSerializedSize();
24267       getUnknownFields().writeTo(output);
24268     }
24269 
24270     private int memoizedSerializedSize = -1;
getSerializedSize()24271     public int getSerializedSize() {
24272       int size = memoizedSerializedSize;
24273       if (size != -1) return size;
24274 
24275       size = 0;
24276       size += getUnknownFields().getSerializedSize();
24277       memoizedSerializedSize = size;
24278       return size;
24279     }
24280 
24281     private static final long serialVersionUID = 0L;
24282     @java.lang.Override
writeReplace()24283     protected java.lang.Object writeReplace()
24284         throws java.io.ObjectStreamException {
24285       return super.writeReplace();
24286     }
24287 
24288     @java.lang.Override
equals(final java.lang.Object obj)24289     public boolean equals(final java.lang.Object obj) {
24290       if (obj == this) {
24291        return true;
24292       }
24293       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)) {
24294         return super.equals(obj);
24295       }
24296       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest) obj;
24297 
24298       boolean result = true;
24299       result = result &&
24300           getUnknownFields().equals(other.getUnknownFields());
24301       return result;
24302     }
24303 
24304     private int memoizedHashCode = 0;
24305     @java.lang.Override
hashCode()24306     public int hashCode() {
24307       if (memoizedHashCode != 0) {
24308         return memoizedHashCode;
24309       }
24310       int hash = 41;
24311       hash = (19 * hash) + getDescriptorForType().hashCode();
24312       hash = (29 * hash) + getUnknownFields().hashCode();
24313       memoizedHashCode = hash;
24314       return hash;
24315     }
24316 
parseFrom( com.google.protobuf.ByteString data)24317     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(
24318         com.google.protobuf.ByteString data)
24319         throws com.google.protobuf.InvalidProtocolBufferException {
24320       return PARSER.parseFrom(data);
24321     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24322     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(
24323         com.google.protobuf.ByteString data,
24324         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24325         throws com.google.protobuf.InvalidProtocolBufferException {
24326       return PARSER.parseFrom(data, extensionRegistry);
24327     }
parseFrom(byte[] data)24328     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(byte[] data)
24329         throws com.google.protobuf.InvalidProtocolBufferException {
24330       return PARSER.parseFrom(data);
24331     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24332     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(
24333         byte[] data,
24334         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24335         throws com.google.protobuf.InvalidProtocolBufferException {
24336       return PARSER.parseFrom(data, extensionRegistry);
24337     }
parseFrom(java.io.InputStream input)24338     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(java.io.InputStream input)
24339         throws java.io.IOException {
24340       return PARSER.parseFrom(input);
24341     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24342     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(
24343         java.io.InputStream input,
24344         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24345         throws java.io.IOException {
24346       return PARSER.parseFrom(input, extensionRegistry);
24347     }
parseDelimitedFrom(java.io.InputStream input)24348     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseDelimitedFrom(java.io.InputStream input)
24349         throws java.io.IOException {
24350       return PARSER.parseDelimitedFrom(input);
24351     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24352     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseDelimitedFrom(
24353         java.io.InputStream input,
24354         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24355         throws java.io.IOException {
24356       return PARSER.parseDelimitedFrom(input, extensionRegistry);
24357     }
parseFrom( com.google.protobuf.CodedInputStream input)24358     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(
24359         com.google.protobuf.CodedInputStream input)
24360         throws java.io.IOException {
24361       return PARSER.parseFrom(input);
24362     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24363     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parseFrom(
24364         com.google.protobuf.CodedInputStream input,
24365         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24366         throws java.io.IOException {
24367       return PARSER.parseFrom(input, extensionRegistry);
24368     }
24369 
newBuilder()24370     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()24371     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest prototype)24372     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest prototype) {
24373       return newBuilder().mergeFrom(prototype);
24374     }
toBuilder()24375     public Builder toBuilder() { return newBuilder(this); }
24376 
24377     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)24378     protected Builder newBuilderForType(
24379         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24380       Builder builder = new Builder(parent);
24381       return builder;
24382     }
24383     /**
24384      * Protobuf type {@code StopMasterRequest}
24385      */
24386     public static final class Builder extends
24387         com.google.protobuf.GeneratedMessage.Builder<Builder>
24388        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequestOrBuilder {
24389       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()24390           getDescriptor() {
24391         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_descriptor;
24392       }
24393 
24394       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()24395           internalGetFieldAccessorTable() {
24396         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_fieldAccessorTable
24397             .ensureFieldAccessorsInitialized(
24398                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.Builder.class);
24399       }
24400 
24401       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.newBuilder()
Builder()24402       private Builder() {
24403         maybeForceBuilderInitialization();
24404       }
24405 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)24406       private Builder(
24407           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24408         super(parent);
24409         maybeForceBuilderInitialization();
24410       }
maybeForceBuilderInitialization()24411       private void maybeForceBuilderInitialization() {
24412         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
24413         }
24414       }
create()24415       private static Builder create() {
24416         return new Builder();
24417       }
24418 
clear()24419       public Builder clear() {
24420         super.clear();
24421         return this;
24422       }
24423 
clone()24424       public Builder clone() {
24425         return create().mergeFrom(buildPartial());
24426       }
24427 
24428       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()24429           getDescriptorForType() {
24430         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterRequest_descriptor;
24431       }
24432 
getDefaultInstanceForType()24433       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest getDefaultInstanceForType() {
24434         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance();
24435       }
24436 
build()24437       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest build() {
24438         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest result = buildPartial();
24439         if (!result.isInitialized()) {
24440           throw newUninitializedMessageException(result);
24441         }
24442         return result;
24443       }
24444 
buildPartial()24445       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest buildPartial() {
24446         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest(this);
24447         onBuilt();
24448         return result;
24449       }
24450 
mergeFrom(com.google.protobuf.Message other)24451       public Builder mergeFrom(com.google.protobuf.Message other) {
24452         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest) {
24453           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)other);
24454         } else {
24455           super.mergeFrom(other);
24456           return this;
24457         }
24458       }
24459 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest other)24460       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest other) {
24461         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance()) return this;
24462         this.mergeUnknownFields(other.getUnknownFields());
24463         return this;
24464       }
24465 
isInitialized()24466       public final boolean isInitialized() {
24467         return true;
24468       }
24469 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24470       public Builder mergeFrom(
24471           com.google.protobuf.CodedInputStream input,
24472           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24473           throws java.io.IOException {
24474         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest parsedMessage = null;
24475         try {
24476           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
24477         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24478           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest) e.getUnfinishedMessage();
24479           throw e;
24480         } finally {
24481           if (parsedMessage != null) {
24482             mergeFrom(parsedMessage);
24483           }
24484         }
24485         return this;
24486       }
24487 
24488       // @@protoc_insertion_point(builder_scope:StopMasterRequest)
24489     }
24490 
24491     static {
24492       defaultInstance = new StopMasterRequest(true);
defaultInstance.initFields()24493       defaultInstance.initFields();
24494     }
24495 
24496     // @@protoc_insertion_point(class_scope:StopMasterRequest)
24497   }
24498 
  // Marker interface: StopMasterResponse declares no fields of its own.
  public interface StopMasterResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
24502   /**
24503    * Protobuf type {@code StopMasterResponse}
24504    */
24505   public static final class StopMasterResponse extends
24506       com.google.protobuf.GeneratedMessage
24507       implements StopMasterResponseOrBuilder {
    // Use StopMasterResponse.newBuilder() to construct.
    private StopMasterResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton defaultInstance; skips normal initialization.
    private StopMasterResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
24514 
    // Singleton default instance, assigned in the class static initializer.
    private static final StopMasterResponse defaultInstance;
    /** Returns the shared immutable default (empty) instance. */
    public static StopMasterResponse getDefaultInstance() {
      return defaultInstance;
    }

    public StopMasterResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
24523 
    // Fields not recognized at parse time, retained for re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
StopMasterResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24530     private StopMasterResponse(
24531         com.google.protobuf.CodedInputStream input,
24532         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24533         throws com.google.protobuf.InvalidProtocolBufferException {
24534       initFields();
24535       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
24536           com.google.protobuf.UnknownFieldSet.newBuilder();
24537       try {
24538         boolean done = false;
24539         while (!done) {
24540           int tag = input.readTag();
24541           switch (tag) {
24542             case 0:
24543               done = true;
24544               break;
24545             default: {
24546               if (!parseUnknownField(input, unknownFields,
24547                                      extensionRegistry, tag)) {
24548                 done = true;
24549               }
24550               break;
24551             }
24552           }
24553         }
24554       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24555         throw e.setUnfinishedMessage(this);
24556       } catch (java.io.IOException e) {
24557         throw new com.google.protobuf.InvalidProtocolBufferException(
24558             e.getMessage()).setUnfinishedMessage(this);
24559       } finally {
24560         this.unknownFields = unknownFields.build();
24561         makeExtensionsImmutable();
24562       }
24563     }
    /** Returns the static descriptor for the StopMasterResponse message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_descriptor;
    }
24568 
    /** Wires the reflection-based field accessors for this message type. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.Builder.class);
    }
24575 
24576     public static com.google.protobuf.Parser<StopMasterResponse> PARSER =
24577         new com.google.protobuf.AbstractParser<StopMasterResponse>() {
24578       public StopMasterResponse parsePartialFrom(
24579           com.google.protobuf.CodedInputStream input,
24580           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24581           throws com.google.protobuf.InvalidProtocolBufferException {
24582         return new StopMasterResponse(input, extensionRegistry);
24583       }
24584     };
24585 
    /** Returns the shared parser instance for this message type. */
    @java.lang.Override
    public com.google.protobuf.Parser<StopMasterResponse> getParserForType() {
      return PARSER;
    }
24590 
initFields()24591     private void initFields() {
24592     }
24593     private byte memoizedIsInitialized = -1;
isInitialized()24594     public final boolean isInitialized() {
24595       byte isInitialized = memoizedIsInitialized;
24596       if (isInitialized != -1) return isInitialized == 1;
24597 
24598       memoizedIsInitialized = 1;
24599       return true;
24600     }
24601 
    /** Serializes this message (unknown fields only) to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure the memoized size is populated first
      getUnknownFields().writeTo(output);
    }
24607 
24608     private int memoizedSerializedSize = -1;
getSerializedSize()24609     public int getSerializedSize() {
24610       int size = memoizedSerializedSize;
24611       if (size != -1) return size;
24612 
24613       size = 0;
24614       size += getUnknownFields().getSerializedSize();
24615       memoizedSerializedSize = size;
24616       return size;
24617     }
24618 
    private static final long serialVersionUID = 0L;
    /** Java serialization is redirected through the superclass proxy object. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
24625 
24626     @java.lang.Override
equals(final java.lang.Object obj)24627     public boolean equals(final java.lang.Object obj) {
24628       if (obj == this) {
24629        return true;
24630       }
24631       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse)) {
24632         return super.equals(obj);
24633       }
24634       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) obj;
24635 
24636       boolean result = true;
24637       result = result &&
24638           getUnknownFields().equals(other.getUnknownFields());
24639       return result;
24640     }
24641 
24642     private int memoizedHashCode = 0;
24643     @java.lang.Override
hashCode()24644     public int hashCode() {
24645       if (memoizedHashCode != 0) {
24646         return memoizedHashCode;
24647       }
24648       int hash = 41;
24649       hash = (19 * hash) + getDescriptorForType().hashCode();
24650       hash = (29 * hash) + getUnknownFields().hashCode();
24651       memoizedHashCode = hash;
24652       return hash;
24653     }
24654 
    // ---- Static parse helpers: all delegate to PARSER. ----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
24707 
    /** Returns a fresh builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
24714 
24715     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)24716     protected Builder newBuilderForType(
24717         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24718       Builder builder = new Builder(parent);
24719       return builder;
24720     }
    /**
     * Protobuf type {@code StopMasterResponse}
     *
     * <p>Builder for the empty StopMasterResponse message; the only state it
     * manages is the unknown-field set inherited from the superclass.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields, so there are no sub-builders to force.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_StopMasterResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if it is uninitialized (cannot happen
       * here: the message has no required fields).
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream; on failure, whatever was read before the error
       * is merged in before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:StopMasterResponse)
    }
24828 
    static {
      // Eagerly create the shared immutable default instance.
      defaultInstance = new StopMasterResponse(true);
      defaultInstance.initFields();
    }
24833 
24834     // @@protoc_insertion_point(class_scope:StopMasterResponse)
24835   }
24836 
  /**
   * Accessor view shared by {@code BalanceRequest} and its builder.  The
   * message declares no fields, so nothing is added beyond MessageOrBuilder.
   */
  public interface BalanceRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code BalanceRequest}
   *
   * <p>Empty request message for the master's balance RPC.  It carries no
   * declared fields; any bytes read from the wire are retained as unknown
   * fields so they round-trip unchanged.
   */
  public static final class BalanceRequest extends
      com.google.protobuf.GeneratedMessage
      implements BalanceRequestOrBuilder {
    // Use BalanceRequest.newBuilder() to construct.
    private BalanceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the shared default instance.
    private BalanceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final BalanceRequest defaultInstance;
    public static BalanceRequest getDefaultInstance() {
      return defaultInstance;
    }

    public BalanceRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unknown fields captured at parse time; preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format constructor: the message declares no fields, so every
     * non-zero tag is routed into {@code unknownFields}.
     */
    private BalanceRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            default: {
              // parseUnknownField returns false on an end-group tag.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers always see a protobuf exception.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze whatever was read, even on the failure paths above.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the descriptor for the {@code BalanceRequest} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_descriptor;
    }

    /** Wires the reflection-based field accessors for this message type. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.Builder.class);
    }

    // Stateless parser delegating to the wire-format constructor.
    public static com.google.protobuf.Parser<BalanceRequest> PARSER =
        new com.google.protobuf.AbstractParser<BalanceRequest>() {
      public BalanceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new BalanceRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<BalanceRequest> getParserForType() {
      return PARSER;
    }

    /** No declared fields, so there is nothing to initialize. */
    private void initFields() {
    }
    // Tri-state cache: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields: always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes this message (unknown fields only) to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Only the retained unknown fields contribute to the size.
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    /** Java serialization is redirected through the superclass proxy object. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /** Equality is determined solely by the unknown-field sets. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- Static parse helpers: all delegate to PARSER. ----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    /** Returns a fresh builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code BalanceRequest}
     *
     * <p>Builder for the empty BalanceRequest message; the only state it
     * manages is the unknown-field set inherited from the superclass.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields, so there are no sub-builders to force.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if it is uninitialized (cannot happen
       * here: the message has no required fields).
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from a stream; on failure, whatever was read before the error
       * is merged in before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:BalanceRequest)
    }

    static {
      // Eagerly create the shared immutable default instance.
      defaultInstance = new BalanceRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:BalanceRequest)
  }
25174 
  /**
   * Accessors common to {@code BalanceResponse} and its builder.
   */
  public interface BalanceResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool balancer_ran = 1;
    /**
     * <code>required bool balancer_ran = 1;</code>
     */
    boolean hasBalancerRan();
    /**
     * <code>required bool balancer_ran = 1;</code>
     */
    boolean getBalancerRan();
  }
25188   /**
25189    * Protobuf type {@code BalanceResponse}
25190    */
25191   public static final class BalanceResponse extends
25192       com.google.protobuf.GeneratedMessage
25193       implements BalanceResponseOrBuilder {
25194     // Use BalanceResponse.newBuilder() to construct.
BalanceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)25195     private BalanceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
25196       super(builder);
25197       this.unknownFields = builder.getUnknownFields();
25198     }
BalanceResponse(boolean noInit)25199     private BalanceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
25200 
25201     private static final BalanceResponse defaultInstance;
getDefaultInstance()25202     public static BalanceResponse getDefaultInstance() {
25203       return defaultInstance;
25204     }
25205 
getDefaultInstanceForType()25206     public BalanceResponse getDefaultInstanceForType() {
25207       return defaultInstance;
25208     }
25209 
25210     private final com.google.protobuf.UnknownFieldSet unknownFields;
25211     @java.lang.Override
25212     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()25213         getUnknownFields() {
25214       return this.unknownFields;
25215     }
BalanceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25216     private BalanceResponse(
25217         com.google.protobuf.CodedInputStream input,
25218         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25219         throws com.google.protobuf.InvalidProtocolBufferException {
25220       initFields();
25221       int mutable_bitField0_ = 0;
25222       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
25223           com.google.protobuf.UnknownFieldSet.newBuilder();
25224       try {
25225         boolean done = false;
25226         while (!done) {
25227           int tag = input.readTag();
25228           switch (tag) {
25229             case 0:
25230               done = true;
25231               break;
25232             default: {
25233               if (!parseUnknownField(input, unknownFields,
25234                                      extensionRegistry, tag)) {
25235                 done = true;
25236               }
25237               break;
25238             }
25239             case 8: {
25240               bitField0_ |= 0x00000001;
25241               balancerRan_ = input.readBool();
25242               break;
25243             }
25244           }
25245         }
25246       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25247         throw e.setUnfinishedMessage(this);
25248       } catch (java.io.IOException e) {
25249         throw new com.google.protobuf.InvalidProtocolBufferException(
25250             e.getMessage()).setUnfinishedMessage(this);
25251       } finally {
25252         this.unknownFields = unknownFields.build();
25253         makeExtensionsImmutable();
25254       }
25255     }
25256     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()25257         getDescriptor() {
25258       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_descriptor;
25259     }
25260 
25261     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()25262         internalGetFieldAccessorTable() {
25263       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_fieldAccessorTable
25264           .ensureFieldAccessorsInitialized(
25265               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.Builder.class);
25266     }
25267 
25268     public static com.google.protobuf.Parser<BalanceResponse> PARSER =
25269         new com.google.protobuf.AbstractParser<BalanceResponse>() {
25270       public BalanceResponse parsePartialFrom(
25271           com.google.protobuf.CodedInputStream input,
25272           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25273           throws com.google.protobuf.InvalidProtocolBufferException {
25274         return new BalanceResponse(input, extensionRegistry);
25275       }
25276     };
25277 
25278     @java.lang.Override
getParserForType()25279     public com.google.protobuf.Parser<BalanceResponse> getParserForType() {
25280       return PARSER;
25281     }
25282 
25283     private int bitField0_;
25284     // required bool balancer_ran = 1;
25285     public static final int BALANCER_RAN_FIELD_NUMBER = 1;
25286     private boolean balancerRan_;
25287     /**
25288      * <code>required bool balancer_ran = 1;</code>
25289      */
hasBalancerRan()25290     public boolean hasBalancerRan() {
25291       return ((bitField0_ & 0x00000001) == 0x00000001);
25292     }
25293     /**
25294      * <code>required bool balancer_ran = 1;</code>
25295      */
getBalancerRan()25296     public boolean getBalancerRan() {
25297       return balancerRan_;
25298     }
25299 
initFields()25300     private void initFields() {
25301       balancerRan_ = false;
25302     }
25303     private byte memoizedIsInitialized = -1;
isInitialized()25304     public final boolean isInitialized() {
25305       byte isInitialized = memoizedIsInitialized;
25306       if (isInitialized != -1) return isInitialized == 1;
25307 
25308       if (!hasBalancerRan()) {
25309         memoizedIsInitialized = 0;
25310         return false;
25311       }
25312       memoizedIsInitialized = 1;
25313       return true;
25314     }
25315 
writeTo(com.google.protobuf.CodedOutputStream output)25316     public void writeTo(com.google.protobuf.CodedOutputStream output)
25317                         throws java.io.IOException {
25318       getSerializedSize();
25319       if (((bitField0_ & 0x00000001) == 0x00000001)) {
25320         output.writeBool(1, balancerRan_);
25321       }
25322       getUnknownFields().writeTo(output);
25323     }
25324 
25325     private int memoizedSerializedSize = -1;
getSerializedSize()25326     public int getSerializedSize() {
25327       int size = memoizedSerializedSize;
25328       if (size != -1) return size;
25329 
25330       size = 0;
25331       if (((bitField0_ & 0x00000001) == 0x00000001)) {
25332         size += com.google.protobuf.CodedOutputStream
25333           .computeBoolSize(1, balancerRan_);
25334       }
25335       size += getUnknownFields().getSerializedSize();
25336       memoizedSerializedSize = size;
25337       return size;
25338     }
25339 
25340     private static final long serialVersionUID = 0L;
25341     @java.lang.Override
writeReplace()25342     protected java.lang.Object writeReplace()
25343         throws java.io.ObjectStreamException {
25344       return super.writeReplace();
25345     }
25346 
25347     @java.lang.Override
equals(final java.lang.Object obj)25348     public boolean equals(final java.lang.Object obj) {
25349       if (obj == this) {
25350        return true;
25351       }
25352       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse)) {
25353         return super.equals(obj);
25354       }
25355       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) obj;
25356 
25357       boolean result = true;
25358       result = result && (hasBalancerRan() == other.hasBalancerRan());
25359       if (hasBalancerRan()) {
25360         result = result && (getBalancerRan()
25361             == other.getBalancerRan());
25362       }
25363       result = result &&
25364           getUnknownFields().equals(other.getUnknownFields());
25365       return result;
25366     }
25367 
25368     private int memoizedHashCode = 0;
25369     @java.lang.Override
hashCode()25370     public int hashCode() {
25371       if (memoizedHashCode != 0) {
25372         return memoizedHashCode;
25373       }
25374       int hash = 41;
25375       hash = (19 * hash) + getDescriptorForType().hashCode();
25376       if (hasBalancerRan()) {
25377         hash = (37 * hash) + BALANCER_RAN_FIELD_NUMBER;
25378         hash = (53 * hash) + hashBoolean(getBalancerRan());
25379       }
25380       hash = (29 * hash) + getUnknownFields().hashCode();
25381       memoizedHashCode = hash;
25382       return hash;
25383     }
25384 
parseFrom( com.google.protobuf.ByteString data)25385     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(
25386         com.google.protobuf.ByteString data)
25387         throws com.google.protobuf.InvalidProtocolBufferException {
25388       return PARSER.parseFrom(data);
25389     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25390     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(
25391         com.google.protobuf.ByteString data,
25392         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25393         throws com.google.protobuf.InvalidProtocolBufferException {
25394       return PARSER.parseFrom(data, extensionRegistry);
25395     }
parseFrom(byte[] data)25396     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(byte[] data)
25397         throws com.google.protobuf.InvalidProtocolBufferException {
25398       return PARSER.parseFrom(data);
25399     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25400     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(
25401         byte[] data,
25402         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25403         throws com.google.protobuf.InvalidProtocolBufferException {
25404       return PARSER.parseFrom(data, extensionRegistry);
25405     }
parseFrom(java.io.InputStream input)25406     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(java.io.InputStream input)
25407         throws java.io.IOException {
25408       return PARSER.parseFrom(input);
25409     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25410     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(
25411         java.io.InputStream input,
25412         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25413         throws java.io.IOException {
25414       return PARSER.parseFrom(input, extensionRegistry);
25415     }
parseDelimitedFrom(java.io.InputStream input)25416     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseDelimitedFrom(java.io.InputStream input)
25417         throws java.io.IOException {
25418       return PARSER.parseDelimitedFrom(input);
25419     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25420     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseDelimitedFrom(
25421         java.io.InputStream input,
25422         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25423         throws java.io.IOException {
25424       return PARSER.parseDelimitedFrom(input, extensionRegistry);
25425     }
parseFrom( com.google.protobuf.CodedInputStream input)25426     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(
25427         com.google.protobuf.CodedInputStream input)
25428         throws java.io.IOException {
25429       return PARSER.parseFrom(input);
25430     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25431     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parseFrom(
25432         com.google.protobuf.CodedInputStream input,
25433         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25434         throws java.io.IOException {
25435       return PARSER.parseFrom(input, extensionRegistry);
25436     }
25437 
newBuilder()25438     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()25439     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse prototype)25440     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse prototype) {
25441       return newBuilder().mergeFrom(prototype);
25442     }
toBuilder()25443     public Builder toBuilder() { return newBuilder(this); }
25444 
25445     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)25446     protected Builder newBuilderForType(
25447         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25448       Builder builder = new Builder(parent);
25449       return builder;
25450     }
25451     /**
25452      * Protobuf type {@code BalanceResponse}
25453      */
25454     public static final class Builder extends
25455         com.google.protobuf.GeneratedMessage.Builder<Builder>
25456        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponseOrBuilder {
25457       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()25458           getDescriptor() {
25459         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_descriptor;
25460       }
25461 
25462       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()25463           internalGetFieldAccessorTable() {
25464         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_fieldAccessorTable
25465             .ensureFieldAccessorsInitialized(
25466                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.Builder.class);
25467       }
25468 
25469       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.newBuilder()
Builder()25470       private Builder() {
25471         maybeForceBuilderInitialization();
25472       }
25473 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)25474       private Builder(
25475           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25476         super(parent);
25477         maybeForceBuilderInitialization();
25478       }
maybeForceBuilderInitialization()25479       private void maybeForceBuilderInitialization() {
25480         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
25481         }
25482       }
create()25483       private static Builder create() {
25484         return new Builder();
25485       }
25486 
clear()25487       public Builder clear() {
25488         super.clear();
25489         balancerRan_ = false;
25490         bitField0_ = (bitField0_ & ~0x00000001);
25491         return this;
25492       }
25493 
clone()25494       public Builder clone() {
25495         return create().mergeFrom(buildPartial());
25496       }
25497 
25498       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()25499           getDescriptorForType() {
25500         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_BalanceResponse_descriptor;
25501       }
25502 
getDefaultInstanceForType()25503       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse getDefaultInstanceForType() {
25504         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance();
25505       }
25506 
build()25507       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse build() {
25508         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse result = buildPartial();
25509         if (!result.isInitialized()) {
25510           throw newUninitializedMessageException(result);
25511         }
25512         return result;
25513       }
25514 
buildPartial()25515       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse buildPartial() {
25516         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse(this);
25517         int from_bitField0_ = bitField0_;
25518         int to_bitField0_ = 0;
25519         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
25520           to_bitField0_ |= 0x00000001;
25521         }
25522         result.balancerRan_ = balancerRan_;
25523         result.bitField0_ = to_bitField0_;
25524         onBuilt();
25525         return result;
25526       }
25527 
mergeFrom(com.google.protobuf.Message other)25528       public Builder mergeFrom(com.google.protobuf.Message other) {
25529         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) {
25530           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse)other);
25531         } else {
25532           super.mergeFrom(other);
25533           return this;
25534         }
25535       }
25536 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse other)25537       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse other) {
25538         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance()) return this;
25539         if (other.hasBalancerRan()) {
25540           setBalancerRan(other.getBalancerRan());
25541         }
25542         this.mergeUnknownFields(other.getUnknownFields());
25543         return this;
25544       }
25545 
isInitialized()25546       public final boolean isInitialized() {
25547         if (!hasBalancerRan()) {
25548 
25549           return false;
25550         }
25551         return true;
25552       }
25553 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25554       public Builder mergeFrom(
25555           com.google.protobuf.CodedInputStream input,
25556           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25557           throws java.io.IOException {
25558         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse parsedMessage = null;
25559         try {
25560           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
25561         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25562           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) e.getUnfinishedMessage();
25563           throw e;
25564         } finally {
25565           if (parsedMessage != null) {
25566             mergeFrom(parsedMessage);
25567           }
25568         }
25569         return this;
25570       }
25571       private int bitField0_;
25572 
25573       // required bool balancer_ran = 1;
25574       private boolean balancerRan_ ;
25575       /**
25576        * <code>required bool balancer_ran = 1;</code>
25577        */
hasBalancerRan()25578       public boolean hasBalancerRan() {
25579         return ((bitField0_ & 0x00000001) == 0x00000001);
25580       }
25581       /**
25582        * <code>required bool balancer_ran = 1;</code>
25583        */
getBalancerRan()25584       public boolean getBalancerRan() {
25585         return balancerRan_;
25586       }
25587       /**
25588        * <code>required bool balancer_ran = 1;</code>
25589        */
setBalancerRan(boolean value)25590       public Builder setBalancerRan(boolean value) {
25591         bitField0_ |= 0x00000001;
25592         balancerRan_ = value;
25593         onChanged();
25594         return this;
25595       }
25596       /**
25597        * <code>required bool balancer_ran = 1;</code>
25598        */
clearBalancerRan()25599       public Builder clearBalancerRan() {
25600         bitField0_ = (bitField0_ & ~0x00000001);
25601         balancerRan_ = false;
25602         onChanged();
25603         return this;
25604       }
25605 
25606       // @@protoc_insertion_point(builder_scope:BalanceResponse)
25607     }
25608 
25609     static {
25610       defaultInstance = new BalanceResponse(true);
defaultInstance.initFields()25611       defaultInstance.initFields();
25612     }
25613 
25614     // @@protoc_insertion_point(class_scope:BalanceResponse)
25615   }
25616 
  /**
   * Read-only accessor interface implemented by both
   * {@code SetBalancerRunningRequest} and its {@code Builder}.
   */
  public interface SetBalancerRunningRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool on = 1;
    /**
     * <code>required bool on = 1;</code>
     *
     * <p>True when the field has been explicitly set.
     */
    boolean hasOn();
    /**
     * <code>required bool on = 1;</code>
     */
    boolean getOn();

    // optional bool synchronous = 2;
    /**
     * <code>optional bool synchronous = 2;</code>
     *
     * <p>True when the field has been explicitly set.
     */
    boolean hasSynchronous();
    /**
     * <code>optional bool synchronous = 2;</code>
     */
    boolean getSynchronous();
  }
25640   /**
25641    * Protobuf type {@code SetBalancerRunningRequest}
25642    */
25643   public static final class SetBalancerRunningRequest extends
25644       com.google.protobuf.GeneratedMessage
25645       implements SetBalancerRunningRequestOrBuilder {
25646     // Use SetBalancerRunningRequest.newBuilder() to construct.
SetBalancerRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)25647     private SetBalancerRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
25648       super(builder);
25649       this.unknownFields = builder.getUnknownFields();
25650     }
SetBalancerRunningRequest(boolean noInit)25651     private SetBalancerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
25652 
25653     private static final SetBalancerRunningRequest defaultInstance;
    /** Returns the shared immutable default instance. */
    public static SetBalancerRunningRequest getDefaultInstance() {
      return defaultInstance;
    }
25657 
    /** Instance-level accessor for the shared default instance. */
    public SetBalancerRunningRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
25661 
25662     private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      // Fields with unrecognized tags collected during parsing.
      return this.unknownFields;
    }
SetBalancerRunningRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25668     private SetBalancerRunningRequest(
25669         com.google.protobuf.CodedInputStream input,
25670         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25671         throws com.google.protobuf.InvalidProtocolBufferException {
25672       initFields();
25673       int mutable_bitField0_ = 0;
25674       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
25675           com.google.protobuf.UnknownFieldSet.newBuilder();
25676       try {
25677         boolean done = false;
25678         while (!done) {
25679           int tag = input.readTag();
25680           switch (tag) {
25681             case 0:
25682               done = true;
25683               break;
25684             default: {
25685               if (!parseUnknownField(input, unknownFields,
25686                                      extensionRegistry, tag)) {
25687                 done = true;
25688               }
25689               break;
25690             }
25691             case 8: {
25692               bitField0_ |= 0x00000001;
25693               on_ = input.readBool();
25694               break;
25695             }
25696             case 16: {
25697               bitField0_ |= 0x00000002;
25698               synchronous_ = input.readBool();
25699               break;
25700             }
25701           }
25702         }
25703       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25704         throw e.setUnfinishedMessage(this);
25705       } catch (java.io.IOException e) {
25706         throw new com.google.protobuf.InvalidProtocolBufferException(
25707             e.getMessage()).setUnfinishedMessage(this);
25708       } finally {
25709         this.unknownFields = unknownFields.build();
25710         makeExtensionsImmutable();
25711       }
25712     }
    /** Returns the message's protobuf descriptor (reflection metadata). */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_descriptor;
    }
25717 
    // Links the descriptor's fields to this class's accessors for reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.Builder.class);
    }
25724 
    // Parser singleton delegating to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<SetBalancerRunningRequest> PARSER =
        new com.google.protobuf.AbstractParser<SetBalancerRunningRequest>() {
      public SetBalancerRunningRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SetBalancerRunningRequest(input, extensionRegistry);
      }
    };
25734 
    @java.lang.Override
    public com.google.protobuf.Parser<SetBalancerRunningRequest> getParserForType() {
      return PARSER;
    }
25739 
25740     private int bitField0_;
25741     // required bool on = 1;
25742     public static final int ON_FIELD_NUMBER = 1;
25743     private boolean on_;
25744     /**
25745      * <code>required bool on = 1;</code>
25746      */
    public boolean hasOn() {
      // Presence of field 1 is tracked in bit 0 of bitField0_.
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
25750     /**
25751      * <code>required bool on = 1;</code>
25752      */
    public boolean getOn() {
      // Returns the proto default (false) when the field is unset.
      return on_;
    }
25756 
25757     // optional bool synchronous = 2;
25758     public static final int SYNCHRONOUS_FIELD_NUMBER = 2;
25759     private boolean synchronous_;
25760     /**
25761      * <code>optional bool synchronous = 2;</code>
25762      */
    public boolean hasSynchronous() {
      // Presence of field 2 is tracked in bit 1 of bitField0_.
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
25766     /**
25767      * <code>optional bool synchronous = 2;</code>
25768      */
    public boolean getSynchronous() {
      // Returns the proto default (false) when the field is unset.
      return synchronous_;
    }
25772 
    // Resets all fields to their proto default values.
    private void initFields() {
      on_ = false;
      synchronous_ = false;
    }
25777     private byte memoizedIsInitialized = -1;
    /**
     * True iff all required fields are set; the result is memoized in
     * {@code memoizedIsInitialized} (-1 unknown, 0 false, 1 true).
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'on' is the only required field.
      if (!hasOn()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
25789 
    /** Serializes set fields (then unknown fields) to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces size computation/memoization before serializing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, on_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, synchronous_);
      }
      getUnknownFields().writeTo(output);
    }
25801 
25802     private int memoizedSerializedSize = -1;
    /**
     * Computes (once) and memoizes the serialized byte size; -1 in
     * {@code memoizedSerializedSize} means "not yet computed".
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, on_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, synchronous_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
25820 
25821     private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Delegates Java serialization to GeneratedMessage's replacement object.
      return super.writeReplace();
    }
25827 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest) obj;

      // Equal iff per-field presence, set values and unknown fields all match.
      boolean result = true;
      result = result && (hasOn() == other.hasOn());
      if (hasOn()) {
        result = result && (getOn()
            == other.getOn());
      }
      result = result && (hasSynchronous() == other.hasSynchronous());
      if (hasSynchronous()) {
        result = result && (getSynchronous()
            == other.getSynchronous());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
25853 
25854     private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      // 0 doubles as the "not computed yet" sentinel for the memoized hash.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasOn()) {
        hash = (37 * hash) + ON_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getOn());
      }
      if (hasSynchronous()) {
        hash = (37 * hash) + SYNCHRONOUS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getSynchronous());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
25874 
    /** Parses a message from {@code data}; delegates to {@code PARSER}. */
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25880     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(
25881         com.google.protobuf.ByteString data,
25882         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25883         throws com.google.protobuf.InvalidProtocolBufferException {
25884       return PARSER.parseFrom(data, extensionRegistry);
25885     }
parseFrom(byte[] data)25886     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(byte[] data)
25887         throws com.google.protobuf.InvalidProtocolBufferException {
25888       return PARSER.parseFrom(data);
25889     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25890     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(
25891         byte[] data,
25892         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25893         throws com.google.protobuf.InvalidProtocolBufferException {
25894       return PARSER.parseFrom(data, extensionRegistry);
25895     }
parseFrom(java.io.InputStream input)25896     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(java.io.InputStream input)
25897         throws java.io.IOException {
25898       return PARSER.parseFrom(input);
25899     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25900     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(
25901         java.io.InputStream input,
25902         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25903         throws java.io.IOException {
25904       return PARSER.parseFrom(input, extensionRegistry);
25905     }
parseDelimitedFrom(java.io.InputStream input)25906     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseDelimitedFrom(java.io.InputStream input)
25907         throws java.io.IOException {
25908       return PARSER.parseDelimitedFrom(input);
25909     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25910     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseDelimitedFrom(
25911         java.io.InputStream input,
25912         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25913         throws java.io.IOException {
25914       return PARSER.parseDelimitedFrom(input, extensionRegistry);
25915     }
parseFrom( com.google.protobuf.CodedInputStream input)25916     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(
25917         com.google.protobuf.CodedInputStream input)
25918         throws java.io.IOException {
25919       return PARSER.parseFrom(input);
25920     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25921     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parseFrom(
25922         com.google.protobuf.CodedInputStream input,
25923         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25924         throws java.io.IOException {
25925       return PARSER.parseFrom(input, extensionRegistry);
25926     }
25927 
newBuilder()25928     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()25929     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest prototype)25930     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest prototype) {
25931       return newBuilder().mergeFrom(prototype);
25932     }
toBuilder()25933     public Builder toBuilder() { return newBuilder(this); }
25934 
25935     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)25936     protected Builder newBuilderForType(
25937         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25938       Builder builder = new Builder(parent);
25939       return builder;
25940     }
25941     /**
25942      * Protobuf type {@code SetBalancerRunningRequest}
25943      */
25944     public static final class Builder extends
25945         com.google.protobuf.GeneratedMessage.Builder<Builder>
25946        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequestOrBuilder {
25947       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()25948           getDescriptor() {
25949         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_descriptor;
25950       }
25951 
25952       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()25953           internalGetFieldAccessorTable() {
25954         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_fieldAccessorTable
25955             .ensureFieldAccessorsInitialized(
25956                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.Builder.class);
25957       }
25958 
25959       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.newBuilder()
Builder()25960       private Builder() {
25961         maybeForceBuilderInitialization();
25962       }
25963 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)25964       private Builder(
25965           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25966         super(parent);
25967         maybeForceBuilderInitialization();
25968       }
maybeForceBuilderInitialization()25969       private void maybeForceBuilderInitialization() {
25970         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
25971         }
25972       }
create()25973       private static Builder create() {
25974         return new Builder();
25975       }
25976 
clear()25977       public Builder clear() {
25978         super.clear();
25979         on_ = false;
25980         bitField0_ = (bitField0_ & ~0x00000001);
25981         synchronous_ = false;
25982         bitField0_ = (bitField0_ & ~0x00000002);
25983         return this;
25984       }
25985 
clone()25986       public Builder clone() {
25987         return create().mergeFrom(buildPartial());
25988       }
25989 
25990       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()25991           getDescriptorForType() {
25992         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningRequest_descriptor;
25993       }
25994 
getDefaultInstanceForType()25995       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest getDefaultInstanceForType() {
25996         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance();
25997       }
25998 
build()25999       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest build() {
26000         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest result = buildPartial();
26001         if (!result.isInitialized()) {
26002           throw newUninitializedMessageException(result);
26003         }
26004         return result;
26005       }
26006 
buildPartial()26007       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest buildPartial() {
26008         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest(this);
26009         int from_bitField0_ = bitField0_;
26010         int to_bitField0_ = 0;
26011         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
26012           to_bitField0_ |= 0x00000001;
26013         }
26014         result.on_ = on_;
26015         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
26016           to_bitField0_ |= 0x00000002;
26017         }
26018         result.synchronous_ = synchronous_;
26019         result.bitField0_ = to_bitField0_;
26020         onBuilt();
26021         return result;
26022       }
26023 
mergeFrom(com.google.protobuf.Message other)26024       public Builder mergeFrom(com.google.protobuf.Message other) {
26025         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest) {
26026           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)other);
26027         } else {
26028           super.mergeFrom(other);
26029           return this;
26030         }
26031       }
26032 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest other)26033       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest other) {
26034         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance()) return this;
26035         if (other.hasOn()) {
26036           setOn(other.getOn());
26037         }
26038         if (other.hasSynchronous()) {
26039           setSynchronous(other.getSynchronous());
26040         }
26041         this.mergeUnknownFields(other.getUnknownFields());
26042         return this;
26043       }
26044 
isInitialized()26045       public final boolean isInitialized() {
26046         if (!hasOn()) {
26047 
26048           return false;
26049         }
26050         return true;
26051       }
26052 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26053       public Builder mergeFrom(
26054           com.google.protobuf.CodedInputStream input,
26055           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26056           throws java.io.IOException {
26057         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest parsedMessage = null;
26058         try {
26059           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
26060         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
26061           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest) e.getUnfinishedMessage();
26062           throw e;
26063         } finally {
26064           if (parsedMessage != null) {
26065             mergeFrom(parsedMessage);
26066           }
26067         }
26068         return this;
26069       }
26070       private int bitField0_;
26071 
26072       // required bool on = 1;
26073       private boolean on_ ;
26074       /**
26075        * <code>required bool on = 1;</code>
26076        */
hasOn()26077       public boolean hasOn() {
26078         return ((bitField0_ & 0x00000001) == 0x00000001);
26079       }
26080       /**
26081        * <code>required bool on = 1;</code>
26082        */
getOn()26083       public boolean getOn() {
26084         return on_;
26085       }
26086       /**
26087        * <code>required bool on = 1;</code>
26088        */
setOn(boolean value)26089       public Builder setOn(boolean value) {
26090         bitField0_ |= 0x00000001;
26091         on_ = value;
26092         onChanged();
26093         return this;
26094       }
26095       /**
26096        * <code>required bool on = 1;</code>
26097        */
clearOn()26098       public Builder clearOn() {
26099         bitField0_ = (bitField0_ & ~0x00000001);
26100         on_ = false;
26101         onChanged();
26102         return this;
26103       }
26104 
26105       // optional bool synchronous = 2;
26106       private boolean synchronous_ ;
26107       /**
26108        * <code>optional bool synchronous = 2;</code>
26109        */
hasSynchronous()26110       public boolean hasSynchronous() {
26111         return ((bitField0_ & 0x00000002) == 0x00000002);
26112       }
26113       /**
26114        * <code>optional bool synchronous = 2;</code>
26115        */
getSynchronous()26116       public boolean getSynchronous() {
26117         return synchronous_;
26118       }
26119       /**
26120        * <code>optional bool synchronous = 2;</code>
26121        */
setSynchronous(boolean value)26122       public Builder setSynchronous(boolean value) {
26123         bitField0_ |= 0x00000002;
26124         synchronous_ = value;
26125         onChanged();
26126         return this;
26127       }
26128       /**
26129        * <code>optional bool synchronous = 2;</code>
26130        */
clearSynchronous()26131       public Builder clearSynchronous() {
26132         bitField0_ = (bitField0_ & ~0x00000002);
26133         synchronous_ = false;
26134         onChanged();
26135         return this;
26136       }
26137 
26138       // @@protoc_insertion_point(builder_scope:SetBalancerRunningRequest)
26139     }
26140 
26141     static {
26142       defaultInstance = new SetBalancerRunningRequest(true);
defaultInstance.initFields()26143       defaultInstance.initFields();
26144     }
26145 
26146     // @@protoc_insertion_point(class_scope:SetBalancerRunningRequest)
26147   }
26148 
26149   public interface SetBalancerRunningResponseOrBuilder
26150       extends com.google.protobuf.MessageOrBuilder {
26151 
26152     // optional bool prev_balance_value = 1;
26153     /**
26154      * <code>optional bool prev_balance_value = 1;</code>
26155      */
hasPrevBalanceValue()26156     boolean hasPrevBalanceValue();
26157     /**
26158      * <code>optional bool prev_balance_value = 1;</code>
26159      */
getPrevBalanceValue()26160     boolean getPrevBalanceValue();
26161   }
26162   /**
26163    * Protobuf type {@code SetBalancerRunningResponse}
26164    */
26165   public static final class SetBalancerRunningResponse extends
26166       com.google.protobuf.GeneratedMessage
26167       implements SetBalancerRunningResponseOrBuilder {
26168     // Use SetBalancerRunningResponse.newBuilder() to construct.
SetBalancerRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)26169     private SetBalancerRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
26170       super(builder);
26171       this.unknownFields = builder.getUnknownFields();
26172     }
SetBalancerRunningResponse(boolean noInit)26173     private SetBalancerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
26174 
26175     private static final SetBalancerRunningResponse defaultInstance;
getDefaultInstance()26176     public static SetBalancerRunningResponse getDefaultInstance() {
26177       return defaultInstance;
26178     }
26179 
getDefaultInstanceForType()26180     public SetBalancerRunningResponse getDefaultInstanceForType() {
26181       return defaultInstance;
26182     }
26183 
26184     private final com.google.protobuf.UnknownFieldSet unknownFields;
26185     @java.lang.Override
26186     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()26187         getUnknownFields() {
26188       return this.unknownFields;
26189     }
SetBalancerRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26190     private SetBalancerRunningResponse(
26191         com.google.protobuf.CodedInputStream input,
26192         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26193         throws com.google.protobuf.InvalidProtocolBufferException {
26194       initFields();
26195       int mutable_bitField0_ = 0;
26196       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
26197           com.google.protobuf.UnknownFieldSet.newBuilder();
26198       try {
26199         boolean done = false;
26200         while (!done) {
26201           int tag = input.readTag();
26202           switch (tag) {
26203             case 0:
26204               done = true;
26205               break;
26206             default: {
26207               if (!parseUnknownField(input, unknownFields,
26208                                      extensionRegistry, tag)) {
26209                 done = true;
26210               }
26211               break;
26212             }
26213             case 8: {
26214               bitField0_ |= 0x00000001;
26215               prevBalanceValue_ = input.readBool();
26216               break;
26217             }
26218           }
26219         }
26220       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
26221         throw e.setUnfinishedMessage(this);
26222       } catch (java.io.IOException e) {
26223         throw new com.google.protobuf.InvalidProtocolBufferException(
26224             e.getMessage()).setUnfinishedMessage(this);
26225       } finally {
26226         this.unknownFields = unknownFields.build();
26227         makeExtensionsImmutable();
26228       }
26229     }
26230     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()26231         getDescriptor() {
26232       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_descriptor;
26233     }
26234 
26235     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()26236         internalGetFieldAccessorTable() {
26237       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable
26238           .ensureFieldAccessorsInitialized(
26239               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.Builder.class);
26240     }
26241 
26242     public static com.google.protobuf.Parser<SetBalancerRunningResponse> PARSER =
26243         new com.google.protobuf.AbstractParser<SetBalancerRunningResponse>() {
26244       public SetBalancerRunningResponse parsePartialFrom(
26245           com.google.protobuf.CodedInputStream input,
26246           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26247           throws com.google.protobuf.InvalidProtocolBufferException {
26248         return new SetBalancerRunningResponse(input, extensionRegistry);
26249       }
26250     };
26251 
26252     @java.lang.Override
getParserForType()26253     public com.google.protobuf.Parser<SetBalancerRunningResponse> getParserForType() {
26254       return PARSER;
26255     }
26256 
26257     private int bitField0_;
26258     // optional bool prev_balance_value = 1;
26259     public static final int PREV_BALANCE_VALUE_FIELD_NUMBER = 1;
26260     private boolean prevBalanceValue_;
26261     /**
26262      * <code>optional bool prev_balance_value = 1;</code>
26263      */
hasPrevBalanceValue()26264     public boolean hasPrevBalanceValue() {
26265       return ((bitField0_ & 0x00000001) == 0x00000001);
26266     }
26267     /**
26268      * <code>optional bool prev_balance_value = 1;</code>
26269      */
getPrevBalanceValue()26270     public boolean getPrevBalanceValue() {
26271       return prevBalanceValue_;
26272     }
26273 
initFields()26274     private void initFields() {
26275       prevBalanceValue_ = false;
26276     }
26277     private byte memoizedIsInitialized = -1;
isInitialized()26278     public final boolean isInitialized() {
26279       byte isInitialized = memoizedIsInitialized;
26280       if (isInitialized != -1) return isInitialized == 1;
26281 
26282       memoizedIsInitialized = 1;
26283       return true;
26284     }
26285 
writeTo(com.google.protobuf.CodedOutputStream output)26286     public void writeTo(com.google.protobuf.CodedOutputStream output)
26287                         throws java.io.IOException {
26288       getSerializedSize();
26289       if (((bitField0_ & 0x00000001) == 0x00000001)) {
26290         output.writeBool(1, prevBalanceValue_);
26291       }
26292       getUnknownFields().writeTo(output);
26293     }
26294 
26295     private int memoizedSerializedSize = -1;
getSerializedSize()26296     public int getSerializedSize() {
26297       int size = memoizedSerializedSize;
26298       if (size != -1) return size;
26299 
26300       size = 0;
26301       if (((bitField0_ & 0x00000001) == 0x00000001)) {
26302         size += com.google.protobuf.CodedOutputStream
26303           .computeBoolSize(1, prevBalanceValue_);
26304       }
26305       size += getUnknownFields().getSerializedSize();
26306       memoizedSerializedSize = size;
26307       return size;
26308     }
26309 
26310     private static final long serialVersionUID = 0L;
26311     @java.lang.Override
writeReplace()26312     protected java.lang.Object writeReplace()
26313         throws java.io.ObjectStreamException {
26314       return super.writeReplace();
26315     }
26316 
26317     @java.lang.Override
equals(final java.lang.Object obj)26318     public boolean equals(final java.lang.Object obj) {
26319       if (obj == this) {
26320        return true;
26321       }
26322       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse)) {
26323         return super.equals(obj);
26324       }
26325       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) obj;
26326 
26327       boolean result = true;
26328       result = result && (hasPrevBalanceValue() == other.hasPrevBalanceValue());
26329       if (hasPrevBalanceValue()) {
26330         result = result && (getPrevBalanceValue()
26331             == other.getPrevBalanceValue());
26332       }
26333       result = result &&
26334           getUnknownFields().equals(other.getUnknownFields());
26335       return result;
26336     }
26337 
26338     private int memoizedHashCode = 0;
26339     @java.lang.Override
hashCode()26340     public int hashCode() {
26341       if (memoizedHashCode != 0) {
26342         return memoizedHashCode;
26343       }
26344       int hash = 41;
26345       hash = (19 * hash) + getDescriptorForType().hashCode();
26346       if (hasPrevBalanceValue()) {
26347         hash = (37 * hash) + PREV_BALANCE_VALUE_FIELD_NUMBER;
26348         hash = (53 * hash) + hashBoolean(getPrevBalanceValue());
26349       }
26350       hash = (29 * hash) + getUnknownFields().hashCode();
26351       memoizedHashCode = hash;
26352       return hash;
26353     }
26354 
parseFrom( com.google.protobuf.ByteString data)26355     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(
26356         com.google.protobuf.ByteString data)
26357         throws com.google.protobuf.InvalidProtocolBufferException {
26358       return PARSER.parseFrom(data);
26359     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26360     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(
26361         com.google.protobuf.ByteString data,
26362         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26363         throws com.google.protobuf.InvalidProtocolBufferException {
26364       return PARSER.parseFrom(data, extensionRegistry);
26365     }
parseFrom(byte[] data)26366     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(byte[] data)
26367         throws com.google.protobuf.InvalidProtocolBufferException {
26368       return PARSER.parseFrom(data);
26369     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26370     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(
26371         byte[] data,
26372         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26373         throws com.google.protobuf.InvalidProtocolBufferException {
26374       return PARSER.parseFrom(data, extensionRegistry);
26375     }
parseFrom(java.io.InputStream input)26376     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(java.io.InputStream input)
26377         throws java.io.IOException {
26378       return PARSER.parseFrom(input);
26379     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26380     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(
26381         java.io.InputStream input,
26382         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26383         throws java.io.IOException {
26384       return PARSER.parseFrom(input, extensionRegistry);
26385     }
parseDelimitedFrom(java.io.InputStream input)26386     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseDelimitedFrom(java.io.InputStream input)
26387         throws java.io.IOException {
26388       return PARSER.parseDelimitedFrom(input);
26389     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26390     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseDelimitedFrom(
26391         java.io.InputStream input,
26392         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26393         throws java.io.IOException {
26394       return PARSER.parseDelimitedFrom(input, extensionRegistry);
26395     }
parseFrom( com.google.protobuf.CodedInputStream input)26396     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(
26397         com.google.protobuf.CodedInputStream input)
26398         throws java.io.IOException {
26399       return PARSER.parseFrom(input);
26400     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)26401     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parseFrom(
26402         com.google.protobuf.CodedInputStream input,
26403         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26404         throws java.io.IOException {
26405       return PARSER.parseFrom(input, extensionRegistry);
26406     }
26407 
newBuilder()26408     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()26409     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse prototype)26410     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse prototype) {
26411       return newBuilder().mergeFrom(prototype);
26412     }
toBuilder()26413     public Builder toBuilder() { return newBuilder(this); }
26414 
26415     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)26416     protected Builder newBuilderForType(
26417         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
26418       Builder builder = new Builder(parent);
26419       return builder;
26420     }
26421     /**
26422      * Protobuf type {@code SetBalancerRunningResponse}
26423      */
26424     public static final class Builder extends
26425         com.google.protobuf.GeneratedMessage.Builder<Builder>
26426        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponseOrBuilder {
26427       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()26428           getDescriptor() {
26429         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_descriptor;
26430       }
26431 
26432       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()26433           internalGetFieldAccessorTable() {
26434         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_fieldAccessorTable
26435             .ensureFieldAccessorsInitialized(
26436                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.Builder.class);
26437       }
26438 
26439       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.newBuilder()
Builder()26440       private Builder() {
26441         maybeForceBuilderInitialization();
26442       }
26443 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)26444       private Builder(
26445           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
26446         super(parent);
26447         maybeForceBuilderInitialization();
26448       }
maybeForceBuilderInitialization()26449       private void maybeForceBuilderInitialization() {
26450         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
26451         }
26452       }
create()26453       private static Builder create() {
26454         return new Builder();
26455       }
26456 
      // Resets the single field to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        prevBalanceValue_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      // Deep copy: round-trips through buildPartial() so the clone shares no state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetBalancerRunningResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance();
      }

      // Builds the message, throwing if required fields are missing. This message
      // has only an optional field, so isInitialized() is always true here.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without an initialization check;
      // presence bits are re-packed from the builder's bitField0_.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.prevBalanceValue_ = prevBalanceValue_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dispatches to the typed overload when possible; otherwise falls back to
      // the reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies set fields (and unknown fields) from another message of the same type.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance()) return this;
        if (other.hasPrevBalanceValue()) {
          setPrevBalanceValue(other.getPrevBalanceValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // All fields of this message are optional, so any builder state is valid.
      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream; on InvalidProtocolBufferException, the partially
      // parsed message is still merged in (finally block) before rethrowing —
      // protobuf's standard partial-merge contract.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for optional fields; bit 0 tracks prev_balance_value.
      private int bitField0_;

      // optional bool prev_balance_value = 1;
      private boolean prevBalanceValue_ ;
      /**
       * <code>optional bool prev_balance_value = 1;</code>
       */
      public boolean hasPrevBalanceValue() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bool prev_balance_value = 1;</code>
       */
      public boolean getPrevBalanceValue() {
        return prevBalanceValue_;
      }
      /**
       * <code>optional bool prev_balance_value = 1;</code>
       */
      public Builder setPrevBalanceValue(boolean value) {
        bitField0_ |= 0x00000001;
        prevBalanceValue_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool prev_balance_value = 1;</code>
       */
      public Builder clearPrevBalanceValue() {
        bitField0_ = (bitField0_ & ~0x00000001);
        prevBalanceValue_ = false;
        onChanged();
        return this;
      }
26571 
26572       // @@protoc_insertion_point(builder_scope:SetBalancerRunningResponse)
26573     }
26574 
    static {
      // Eagerly create the shared singleton returned by getDefaultInstance().
      defaultInstance = new SetBalancerRunningResponse(true);
      defaultInstance.initFields();
    }
26579 
26580     // @@protoc_insertion_point(class_scope:SetBalancerRunningResponse)
26581   }
26582 
  // Read-only view shared by IsBalancerEnabledRequest and its Builder.
  // The request message has no fields, so only the generic
  // MessageOrBuilder contract is inherited.
  public interface IsBalancerEnabledRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  // NOTE(review): protoc-generated (protobuf 2.5 era); regenerate from
  // Master.proto rather than hand-editing this class.
  /**
   * Protobuf type {@code IsBalancerEnabledRequest}
   */
  public static final class IsBalancerEnabledRequest extends
      com.google.protobuf.GeneratedMessage
      implements IsBalancerEnabledRequestOrBuilder {
    // Use IsBalancerEnabledRequest.newBuilder() to construct.
    private IsBalancerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the shared default instance (no parsing).
    private IsBalancerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final IsBalancerEnabledRequest defaultInstance;
    public static IsBalancerEnabledRequest getDefaultInstance() {
      return defaultInstance;
    }

    public IsBalancerEnabledRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. The message declares no fields, so every
    // non-zero tag is routed to the unknown-field set; tag 0 means end of input.
    private IsBalancerEnabledRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Whatever was read before a failure is still attached to the message.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.Builder.class);
    }

    // NOTE(review): non-final public static PARSER is what protobuf 2.5's
    // generator emits; later protobuf versions make it final. Leave as generated.
    public static com.google.protobuf.Parser<IsBalancerEnabledRequest> PARSER =
        new com.google.protobuf.AbstractParser<IsBalancerEnabledRequest>() {
      public IsBalancerEnabledRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsBalancerEnabledRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsBalancerEnabledRequest> getParserForType() {
      return PARSER;
    }

    // No fields to initialize for this message type.
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // With no declared fields, equality reduces to unknown-field equality.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code IsBalancerEnabledRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-type fields here, so nothing to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Only unknown fields can carry state for this field-less message.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream; partially parsed data is merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:IsBalancerEnabledRequest)
    }

    static {
      // Eagerly create the shared singleton returned by getDefaultInstance().
      defaultInstance = new IsBalancerEnabledRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:IsBalancerEnabledRequest)
  }
26920 
  // Read-only view shared by IsBalancerEnabledResponse and its Builder.
  public interface IsBalancerEnabledResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool enabled = 1;
    /**
     * <code>required bool enabled = 1;</code>
     */
    boolean hasEnabled();
    /**
     * <code>required bool enabled = 1;</code>
     */
    boolean getEnabled();
  }
26934   /**
26935    * Protobuf type {@code IsBalancerEnabledResponse}
26936    */
26937   public static final class IsBalancerEnabledResponse extends
26938       com.google.protobuf.GeneratedMessage
26939       implements IsBalancerEnabledResponseOrBuilder {
    // Use IsBalancerEnabledResponse.newBuilder() to construct.
    private IsBalancerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the shared default instance (no parsing).
    private IsBalancerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final IsBalancerEnabledResponse defaultInstance;
    public static IsBalancerEnabledResponse getDefaultInstance() {
      return defaultInstance;
    }

    public IsBalancerEnabledResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Note the generator emits the `default:`
    // arm before `case 8:`; Java selects a switch arm by label match, not
    // position, so tag 8 (field 1, varint) is still parsed correctly.
    private IsBalancerEnabledResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Generator artifact: unused for this message (no repeated fields).
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (enabled): record presence and read the bool value.
              bitField0_ |= 0x00000001;
              enabled_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Whatever was read before a failure is still attached to the message.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.Builder.class);
    }

    // NOTE(review): non-final public static PARSER is what protobuf 2.5's
    // generator emits; later protobuf versions make it final. Leave as generated.
    public static com.google.protobuf.Parser<IsBalancerEnabledResponse> PARSER =
        new com.google.protobuf.AbstractParser<IsBalancerEnabledResponse>() {
      public IsBalancerEnabledResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsBalancerEnabledResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsBalancerEnabledResponse> getParserForType() {
      return PARSER;
    }
27028 
    // Presence bits; bit 0 tracks the required `enabled` field.
    private int bitField0_;
    // required bool enabled = 1;
    public static final int ENABLED_FIELD_NUMBER = 1;
    private boolean enabled_;
    /**
     * <code>required bool enabled = 1;</code>
     */
    public boolean hasEnabled() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool enabled = 1;</code>
     */
    public boolean getEnabled() {
      return enabled_;
    }

    private void initFields() {
      enabled_ = false;
    }
    // Cached result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // `enabled` is a required field: the message is invalid without it.
      if (!hasEnabled()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
27061 
    // Serializes set fields in field-number order, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, enabled_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached size: -1 = not computed yet.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, enabled_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
27092 
    // Field-wise equality: presence must match, and values must match when set.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse) obj;

      boolean result = true;
      result = result && (hasEnabled() == other.hasEnabled());
      if (hasEnabled()) {
        result = result && (getEnabled()
            == other.getEnabled());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 doubles as the "not computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasEnabled()) {
        hash = (37 * hash) + ENABLED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getEnabled());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
27130 
    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)27166     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseDelimitedFrom(
27167         java.io.InputStream input,
27168         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27169         throws java.io.IOException {
27170       return PARSER.parseDelimitedFrom(input, extensionRegistry);
27171     }
parseFrom( com.google.protobuf.CodedInputStream input)27172     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(
27173         com.google.protobuf.CodedInputStream input)
27174         throws java.io.IOException {
27175       return PARSER.parseFrom(input);
27176     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)27177     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parseFrom(
27178         com.google.protobuf.CodedInputStream input,
27179         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27180         throws java.io.IOException {
27181       return PARSER.parseFrom(input, extensionRegistry);
27182     }
27183 
    // Builder factories: create a fresh Builder, or one pre-populated
    // from an existing message (newBuilder(prototype) / toBuilder()).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware variant used internally by GeneratedMessage for
      // nested-builder change propagation.
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code IsBalancerEnabledResponse}
     *
     * Mutable builder for the immutable message. Presence of the single
     * required field {@code enabled} is tracked via bit 0 of bitField0_.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields here, so nothing to eagerly initialize
        // even when alwaysUseFieldBuilders is set.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        // Reset field value and clear its "has" bit.
        super.clear();
        enabled_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsBalancerEnabledResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse build() {
        // Like buildPartial(), but rejects messages missing required fields.
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse buildPartial() {
        // Copy builder state into a new immutable message; the "has" bit
        // is transferred from the builder's bitField0_ to the message's.
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.enabled_ = enabled_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse other) {
        // Merging the default instance is a no-op; otherwise only set
        // fields of 'other' overwrite this builder's values.
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance()) return this;
        if (other.hasEnabled()) {
          setEnabled(other.getEnabled());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // 'enabled' is a required field; the message is uninitialized
        // without it.
        if (!hasEnabled()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        // Parse then merge; on parse failure, still merge whatever was
        // successfully read (the exception's unfinished message).
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bool enabled = 1;
      private boolean enabled_ ;
      /**
       * <code>required bool enabled = 1;</code>
       */
      public boolean hasEnabled() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool enabled = 1;</code>
       */
      public boolean getEnabled() {
        return enabled_;
      }
      /**
       * <code>required bool enabled = 1;</code>
       */
      public Builder setEnabled(boolean value) {
        bitField0_ |= 0x00000001;
        enabled_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool enabled = 1;</code>
       */
      public Builder clearEnabled() {
        bitField0_ = (bitField0_ & ~0x00000001);
        enabled_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:IsBalancerEnabledResponse)
    }
27354 
    static {
      // Eagerly create the singleton default instance (noInit=true skips
      // stream parsing) and initialize its fields to proto defaults.
      defaultInstance = new IsBalancerEnabledResponse(true);
      defaultInstance.initFields();
    }
27359 
27360     // @@protoc_insertion_point(class_scope:IsBalancerEnabledResponse)
27361   }
27362 
  // Accessor interface for NormalizeRequest. The message declares no
  // fields, so only the inherited MessageOrBuilder contract applies.
  public interface NormalizeRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code NormalizeRequest}
   *
   * Field-less request message: its only payload is whatever unknown
   * fields were present on the wire.
   */
  public static final class NormalizeRequest extends
      com.google.protobuf.GeneratedMessage
      implements NormalizeRequestOrBuilder {
    // Use NormalizeRequest.newBuilder() to construct.
    private NormalizeRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance.
    private NormalizeRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final NormalizeRequest defaultInstance;
    public static NormalizeRequest getDefaultInstance() {
      return defaultInstance;
    }

    public NormalizeRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads tags until EOF (tag 0); every field is
    // unknown here and is preserved in unknownFields.
    private NormalizeRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach what was read so far, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<NormalizeRequest> PARSER =
        new com.google.protobuf.AbstractParser<NormalizeRequest>() {
      public NormalizeRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new NormalizeRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<NormalizeRequest> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      // No required fields, so always initialized; result is memoized
      // (-1 = unknown, 1 = initialized).
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      // Size is just the unknown fields; cached after first computation.
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) obj;

      // No declared fields: equality reduces to unknown-field equality.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parsing entry points; all delegate to PARSER. "Delimited"
    // variants read a varint length prefix first.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code NormalizeRequest}
     *
     * Builder for the field-less request; clear/merge only touch
     * unknown fields.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // No required fields.
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        // Parse then merge; on failure still merge the partial message
        // carried by the exception.
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:NormalizeRequest)
    }

    static {
      defaultInstance = new NormalizeRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:NormalizeRequest)
  }
27700 
  // Accessor interface for NormalizeResponse: presence check plus getter
  // for its single required bool field.
  public interface NormalizeResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool normalizer_ran = 1;
    /**
     * <code>required bool normalizer_ran = 1;</code>
     */
    boolean hasNormalizerRan();
    /**
     * <code>required bool normalizer_ran = 1;</code>
     */
    boolean getNormalizerRan();
  }
27714   /**
27715    * Protobuf type {@code NormalizeResponse}
27716    */
27717   public static final class NormalizeResponse extends
27718       com.google.protobuf.GeneratedMessage
27719       implements NormalizeResponseOrBuilder {
27720     // Use NormalizeResponse.newBuilder() to construct.
    private NormalizeResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance.
    private NormalizeResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final NormalizeResponse defaultInstance;
    public static NormalizeResponse getDefaultInstance() {
      return defaultInstance;
    }

    public NormalizeResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor: reads tagged fields from |input| until end of
    // message (tag 0), preserving unrecognized fields in unknownFields.
    private NormalizeResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks the end of the message.
              done = true;
              break;
            default: {
              // Keep unknown fields; stop when the tag cannot be consumed.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1, wire type 0 (varint): normalizer_ran.
              bitField0_ |= 0x00000001;
              normalizerRan_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze parsed state even on failure so the partially-parsed message
        // attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the NormalizeResponse message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeResponse_descriptor;
    }

    // Maps descriptor fields onto this class's fields for reflective access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class);
    }
27793 
27794     public static com.google.protobuf.Parser<NormalizeResponse> PARSER =
27795         new com.google.protobuf.AbstractParser<NormalizeResponse>() {
27796       public NormalizeResponse parsePartialFrom(
27797           com.google.protobuf.CodedInputStream input,
27798           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27799           throws com.google.protobuf.InvalidProtocolBufferException {
27800         return new NormalizeResponse(input, extensionRegistry);
27801       }
27802     };
27803 
27804     @java.lang.Override
getParserForType()27805     public com.google.protobuf.Parser<NormalizeResponse> getParserForType() {
27806       return PARSER;
27807     }
27808 
    // Presence bits for optional/required fields (bit 0 = normalizer_ran).
    private int bitField0_;
    // required bool normalizer_ran = 1;
    public static final int NORMALIZER_RAN_FIELD_NUMBER = 1;
    private boolean normalizerRan_;
    /**
     * <code>required bool normalizer_ran = 1;</code>
     * @return whether the field was explicitly set.
     */
    public boolean hasNormalizerRan() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool normalizer_ran = 1;</code>
     * @return the field value (false when unset).
     */
    public boolean getNormalizerRan() {
      return normalizerRan_;
    }
27825 
    // Resets all fields to their proto defaults.
    private void initFields() {
      normalizerRan_ = false;
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // normalizer_ran is a required field.
      if (!hasNormalizerRan()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
27841 
    // Serializes set fields in field-number order, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensure the memoized size is computed before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, normalizerRan_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, normalizerRan_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
27865 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
27872 
27873     @java.lang.Override
equals(final java.lang.Object obj)27874     public boolean equals(final java.lang.Object obj) {
27875       if (obj == this) {
27876        return true;
27877       }
27878       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)) {
27879         return super.equals(obj);
27880       }
27881       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) obj;
27882 
27883       boolean result = true;
27884       result = result && (hasNormalizerRan() == other.hasNormalizerRan());
27885       if (hasNormalizerRan()) {
27886         result = result && (getNormalizerRan()
27887             == other.getNormalizerRan());
27888       }
27889       result = result &&
27890           getUnknownFields().equals(other.getUnknownFields());
27891       return result;
27892     }
27893 
27894     private int memoizedHashCode = 0;
27895     @java.lang.Override
hashCode()27896     public int hashCode() {
27897       if (memoizedHashCode != 0) {
27898         return memoizedHashCode;
27899       }
27900       int hash = 41;
27901       hash = (19 * hash) + getDescriptorForType().hashCode();
27902       if (hasNormalizerRan()) {
27903         hash = (37 * hash) + NORMALIZER_RAN_FIELD_NUMBER;
27904         hash = (53 * hash) + hashBoolean(getNormalizerRan());
27905       }
27906       hash = (29 * hash) + getUnknownFields().hashCode();
27907       memoizedHashCode = hash;
27908       return hash;
27909     }
27910 
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
27962     }
27963 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated from |prototype|.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
27977     /**
27978      * Protobuf type {@code NormalizeResponse}
27979      */
27980     public static final class Builder extends
27981         com.google.protobuf.GeneratedMessage.Builder<Builder>
27982        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponseOrBuilder {
27983       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()27984           getDescriptor() {
27985         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeResponse_descriptor;
27986       }
27987 
27988       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()27989           internalGetFieldAccessorTable() {
27990         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeResponse_fieldAccessorTable
27991             .ensureFieldAccessorsInitialized(
27992                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.Builder.class);
27993       }
27994 
27995       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.newBuilder()
Builder()27996       private Builder() {
27997         maybeForceBuilderInitialization();
27998       }
27999 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)28000       private Builder(
28001           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28002         super(parent);
28003         maybeForceBuilderInitialization();
28004       }
maybeForceBuilderInitialization()28005       private void maybeForceBuilderInitialization() {
28006         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
28007         }
28008       }
create()28009       private static Builder create() {
28010         return new Builder();
28011       }
28012 
clear()28013       public Builder clear() {
28014         super.clear();
28015         normalizerRan_ = false;
28016         bitField0_ = (bitField0_ & ~0x00000001);
28017         return this;
28018       }
28019 
clone()28020       public Builder clone() {
28021         return create().mergeFrom(buildPartial());
28022       }
28023 
28024       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()28025           getDescriptorForType() {
28026         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_NormalizeResponse_descriptor;
28027       }
28028 
getDefaultInstanceForType()28029       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse getDefaultInstanceForType() {
28030         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
28031       }
28032 
build()28033       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse build() {
28034         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = buildPartial();
28035         if (!result.isInitialized()) {
28036           throw newUninitializedMessageException(result);
28037         }
28038         return result;
28039       }
28040 
buildPartial()28041       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse buildPartial() {
28042         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse(this);
28043         int from_bitField0_ = bitField0_;
28044         int to_bitField0_ = 0;
28045         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
28046           to_bitField0_ |= 0x00000001;
28047         }
28048         result.normalizerRan_ = normalizerRan_;
28049         result.bitField0_ = to_bitField0_;
28050         onBuilt();
28051         return result;
28052       }
28053 
mergeFrom(com.google.protobuf.Message other)28054       public Builder mergeFrom(com.google.protobuf.Message other) {
28055         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) {
28056           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse)other);
28057         } else {
28058           super.mergeFrom(other);
28059           return this;
28060         }
28061       }
28062 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other)28063       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse other) {
28064         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance()) return this;
28065         if (other.hasNormalizerRan()) {
28066           setNormalizerRan(other.getNormalizerRan());
28067         }
28068         this.mergeUnknownFields(other.getUnknownFields());
28069         return this;
28070       }
28071 
isInitialized()28072       public final boolean isInitialized() {
28073         if (!hasNormalizerRan()) {
28074 
28075           return false;
28076         }
28077         return true;
28078       }
28079 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28080       public Builder mergeFrom(
28081           com.google.protobuf.CodedInputStream input,
28082           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28083           throws java.io.IOException {
28084         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse parsedMessage = null;
28085         try {
28086           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
28087         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28088           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) e.getUnfinishedMessage();
28089           throw e;
28090         } finally {
28091           if (parsedMessage != null) {
28092             mergeFrom(parsedMessage);
28093           }
28094         }
28095         return this;
28096       }
28097       private int bitField0_;
28098 
28099       // required bool normalizer_ran = 1;
28100       private boolean normalizerRan_ ;
28101       /**
28102        * <code>required bool normalizer_ran = 1;</code>
28103        */
hasNormalizerRan()28104       public boolean hasNormalizerRan() {
28105         return ((bitField0_ & 0x00000001) == 0x00000001);
28106       }
28107       /**
28108        * <code>required bool normalizer_ran = 1;</code>
28109        */
getNormalizerRan()28110       public boolean getNormalizerRan() {
28111         return normalizerRan_;
28112       }
28113       /**
28114        * <code>required bool normalizer_ran = 1;</code>
28115        */
setNormalizerRan(boolean value)28116       public Builder setNormalizerRan(boolean value) {
28117         bitField0_ |= 0x00000001;
28118         normalizerRan_ = value;
28119         onChanged();
28120         return this;
28121       }
28122       /**
28123        * <code>required bool normalizer_ran = 1;</code>
28124        */
clearNormalizerRan()28125       public Builder clearNormalizerRan() {
28126         bitField0_ = (bitField0_ & ~0x00000001);
28127         normalizerRan_ = false;
28128         onChanged();
28129         return this;
28130       }
28131 
28132       // @@protoc_insertion_point(builder_scope:NormalizeResponse)
28133     }
28134 
    // Creates the shared default instance referenced by getDefaultInstance().
    static {
      defaultInstance = new NormalizeResponse(true);
      defaultInstance.initFields();
    }
28139 
28140     // @@protoc_insertion_point(class_scope:NormalizeResponse)
28141   }
28142 
  /**
   * Read accessors shared by {@code SetNormalizerRunningRequest} and its
   * Builder.
   */
  public interface SetNormalizerRunningRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool on = 1;
    /**
     * <code>required bool on = 1;</code>
     */
    boolean hasOn();
    /**
     * <code>required bool on = 1;</code>
     */
    boolean getOn();
  }
28156   /**
28157    * Protobuf type {@code SetNormalizerRunningRequest}
28158    */
28159   public static final class SetNormalizerRunningRequest extends
28160       com.google.protobuf.GeneratedMessage
28161       implements SetNormalizerRunningRequestOrBuilder {
    // Use SetNormalizerRunningRequest.newBuilder() to construct.
    private SetNormalizerRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the shared default instance; no fields to copy.
    private SetNormalizerRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance, assigned in the class's static initializer.
    private static final SetNormalizerRunningRequest defaultInstance;
    public static SetNormalizerRunningRequest getDefaultInstance() {
      return defaultInstance;
    }

    public SetNormalizerRunningRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields received on the wire that are not part of this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor: reads tagged fields from |input| until end of
    // message (tag 0), preserving unrecognized fields in unknownFields.
    private SetNormalizerRunningRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks the end of the message.
              done = true;
              break;
            default: {
              // Keep unknown fields; stop when the tag cannot be consumed.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1, wire type 0 (varint): on.
              bitField0_ |= 0x00000001;
              on_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze parsed state even on failure so the partially-parsed message
        // attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the SetNormalizerRunningRequest message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningRequest_descriptor;
    }

    // Maps descriptor fields onto this class's fields for reflective access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class);
    }
28235 
28236     public static com.google.protobuf.Parser<SetNormalizerRunningRequest> PARSER =
28237         new com.google.protobuf.AbstractParser<SetNormalizerRunningRequest>() {
28238       public SetNormalizerRunningRequest parsePartialFrom(
28239           com.google.protobuf.CodedInputStream input,
28240           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28241           throws com.google.protobuf.InvalidProtocolBufferException {
28242         return new SetNormalizerRunningRequest(input, extensionRegistry);
28243       }
28244     };
28245 
28246     @java.lang.Override
getParserForType()28247     public com.google.protobuf.Parser<SetNormalizerRunningRequest> getParserForType() {
28248       return PARSER;
28249     }
28250 
    // Presence bits for optional/required fields (bit 0 = on).
    private int bitField0_;
    // required bool on = 1;
    public static final int ON_FIELD_NUMBER = 1;
    private boolean on_;
    /**
     * <code>required bool on = 1;</code>
     * @return whether the field was explicitly set.
     */
    public boolean hasOn() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool on = 1;</code>
     * @return the field value (false when unset).
     */
    public boolean getOn() {
      return on_;
    }

    // Resets all fields to their proto defaults.
    private void initFields() {
      on_ = false;
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // "on" is a required field.
      if (!hasOn()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
28283 
    // Serializes set fields in field-number order, then unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensure the memoized size is computed before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, on_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, on_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
28314 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      // Not a SetNormalizerRunningRequest: fall back to the superclass contract.
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) obj;

      // Field presence must match, values must match when present, and
      // unknown fields must match.
      boolean result = true;
      result = result && (hasOn() == other.hasOn());
      if (hasOn()) {
        result = result && (getOn()
            == other.getOn());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasOn()) {
        hash = (37 * hash) + ON_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getOn());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
28352 
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
28405 
newBuilder()28406     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()28407     public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with the field values of {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized with this message's current field values. */
    public Builder toBuilder() { return newBuilder(this); }
28412 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Called by the protobuf runtime to create a builder attached to a parent message.
      Builder builder = new Builder(parent);
      return builder;
    }
28419     /**
28420      * Protobuf type {@code SetNormalizerRunningRequest}
28421      */
28422     public static final class Builder extends
28423         com.google.protobuf.GeneratedMessage.Builder<Builder>
28424        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequestOrBuilder {
28425       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()28426           getDescriptor() {
28427         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningRequest_descriptor;
28428       }
28429 
28430       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()28431           internalGetFieldAccessorTable() {
28432         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningRequest_fieldAccessorTable
28433             .ensureFieldAccessorsInitialized(
28434                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.Builder.class);
28435       }
28436 
28437       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.newBuilder()
Builder()28438       private Builder() {
28439         maybeForceBuilderInitialization();
28440       }
28441 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)28442       private Builder(
28443           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28444         super(parent);
28445         maybeForceBuilderInitialization();
28446       }
maybeForceBuilderInitialization()28447       private void maybeForceBuilderInitialization() {
28448         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
28449         }
28450       }
create()28451       private static Builder create() {
28452         return new Builder();
28453       }
28454 
clear()28455       public Builder clear() {
28456         super.clear();
28457         on_ = false;
28458         bitField0_ = (bitField0_ & ~0x00000001);
28459         return this;
28460       }
28461 
clone()28462       public Builder clone() {
28463         return create().mergeFrom(buildPartial());
28464       }
28465 
28466       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()28467           getDescriptorForType() {
28468         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningRequest_descriptor;
28469       }
28470 
getDefaultInstanceForType()28471       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest getDefaultInstanceForType() {
28472         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
28473       }
28474 
build()28475       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest build() {
28476         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = buildPartial();
28477         if (!result.isInitialized()) {
28478           throw newUninitializedMessageException(result);
28479         }
28480         return result;
28481       }
28482 
buildPartial()28483       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest buildPartial() {
28484         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest(this);
28485         int from_bitField0_ = bitField0_;
28486         int to_bitField0_ = 0;
28487         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
28488           to_bitField0_ |= 0x00000001;
28489         }
28490         result.on_ = on_;
28491         result.bitField0_ = to_bitField0_;
28492         onBuilt();
28493         return result;
28494       }
28495 
mergeFrom(com.google.protobuf.Message other)28496       public Builder mergeFrom(com.google.protobuf.Message other) {
28497         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) {
28498           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)other);
28499         } else {
28500           super.mergeFrom(other);
28501           return this;
28502         }
28503       }
28504 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other)28505       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest other) {
28506         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance()) return this;
28507         if (other.hasOn()) {
28508           setOn(other.getOn());
28509         }
28510         this.mergeUnknownFields(other.getUnknownFields());
28511         return this;
28512       }
28513 
isInitialized()28514       public final boolean isInitialized() {
28515         if (!hasOn()) {
28516 
28517           return false;
28518         }
28519         return true;
28520       }
28521 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28522       public Builder mergeFrom(
28523           com.google.protobuf.CodedInputStream input,
28524           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28525           throws java.io.IOException {
28526         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest parsedMessage = null;
28527         try {
28528           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
28529         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28530           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest) e.getUnfinishedMessage();
28531           throw e;
28532         } finally {
28533           if (parsedMessage != null) {
28534             mergeFrom(parsedMessage);
28535           }
28536         }
28537         return this;
28538       }
28539       private int bitField0_;
28540 
28541       // required bool on = 1;
28542       private boolean on_ ;
28543       /**
28544        * <code>required bool on = 1;</code>
28545        */
hasOn()28546       public boolean hasOn() {
28547         return ((bitField0_ & 0x00000001) == 0x00000001);
28548       }
28549       /**
28550        * <code>required bool on = 1;</code>
28551        */
getOn()28552       public boolean getOn() {
28553         return on_;
28554       }
28555       /**
28556        * <code>required bool on = 1;</code>
28557        */
setOn(boolean value)28558       public Builder setOn(boolean value) {
28559         bitField0_ |= 0x00000001;
28560         on_ = value;
28561         onChanged();
28562         return this;
28563       }
28564       /**
28565        * <code>required bool on = 1;</code>
28566        */
clearOn()28567       public Builder clearOn() {
28568         bitField0_ = (bitField0_ & ~0x00000001);
28569         on_ = false;
28570         onChanged();
28571         return this;
28572       }
28573 
28574       // @@protoc_insertion_point(builder_scope:SetNormalizerRunningRequest)
28575     }
28576 
    static {
      // Eagerly build the shared default instance returned by getDefaultInstance().
      defaultInstance = new SetNormalizerRunningRequest(true);
      defaultInstance.initFields();
    }
28581 
28582     // @@protoc_insertion_point(class_scope:SetNormalizerRunningRequest)
28583   }
28584 
  /**
   * Read-only accessor interface implemented by both
   * {@code SetNormalizerRunningResponse} and its builder.
   */
  public interface SetNormalizerRunningResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool prev_normalizer_value = 1;
    /**
     * <code>optional bool prev_normalizer_value = 1;</code>
     */
    boolean hasPrevNormalizerValue();
    /**
     * <code>optional bool prev_normalizer_value = 1;</code>
     */
    boolean getPrevNormalizerValue();
  }
28598   /**
28599    * Protobuf type {@code SetNormalizerRunningResponse}
28600    */
28601   public static final class SetNormalizerRunningResponse extends
28602       com.google.protobuf.GeneratedMessage
28603       implements SetNormalizerRunningResponseOrBuilder {
28604     // Use SetNormalizerRunningResponse.newBuilder() to construct.
SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)28605     private SetNormalizerRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
28606       super(builder);
28607       this.unknownFields = builder.getUnknownFields();
28608     }
SetNormalizerRunningResponse(boolean noInit)28609     private SetNormalizerRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
28610 
28611     private static final SetNormalizerRunningResponse defaultInstance;
getDefaultInstance()28612     public static SetNormalizerRunningResponse getDefaultInstance() {
28613       return defaultInstance;
28614     }
28615 
getDefaultInstanceForType()28616     public SetNormalizerRunningResponse getDefaultInstanceForType() {
28617       return defaultInstance;
28618     }
28619 
28620     private final com.google.protobuf.UnknownFieldSet unknownFields;
28621     @java.lang.Override
28622     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()28623         getUnknownFields() {
28624       return this.unknownFields;
28625     }
SetNormalizerRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28626     private SetNormalizerRunningResponse(
28627         com.google.protobuf.CodedInputStream input,
28628         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28629         throws com.google.protobuf.InvalidProtocolBufferException {
28630       initFields();
28631       int mutable_bitField0_ = 0;
28632       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
28633           com.google.protobuf.UnknownFieldSet.newBuilder();
28634       try {
28635         boolean done = false;
28636         while (!done) {
28637           int tag = input.readTag();
28638           switch (tag) {
28639             case 0:
28640               done = true;
28641               break;
28642             default: {
28643               if (!parseUnknownField(input, unknownFields,
28644                                      extensionRegistry, tag)) {
28645                 done = true;
28646               }
28647               break;
28648             }
28649             case 8: {
28650               bitField0_ |= 0x00000001;
28651               prevNormalizerValue_ = input.readBool();
28652               break;
28653             }
28654           }
28655         }
28656       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28657         throw e.setUnfinishedMessage(this);
28658       } catch (java.io.IOException e) {
28659         throw new com.google.protobuf.InvalidProtocolBufferException(
28660             e.getMessage()).setUnfinishedMessage(this);
28661       } finally {
28662         this.unknownFields = unknownFields.build();
28663         makeExtensionsImmutable();
28664       }
28665     }
28666     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()28667         getDescriptor() {
28668       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningResponse_descriptor;
28669     }
28670 
28671     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()28672         internalGetFieldAccessorTable() {
28673       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningResponse_fieldAccessorTable
28674           .ensureFieldAccessorsInitialized(
28675               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class);
28676     }
28677 
28678     public static com.google.protobuf.Parser<SetNormalizerRunningResponse> PARSER =
28679         new com.google.protobuf.AbstractParser<SetNormalizerRunningResponse>() {
28680       public SetNormalizerRunningResponse parsePartialFrom(
28681           com.google.protobuf.CodedInputStream input,
28682           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28683           throws com.google.protobuf.InvalidProtocolBufferException {
28684         return new SetNormalizerRunningResponse(input, extensionRegistry);
28685       }
28686     };
28687 
28688     @java.lang.Override
getParserForType()28689     public com.google.protobuf.Parser<SetNormalizerRunningResponse> getParserForType() {
28690       return PARSER;
28691     }
28692 
28693     private int bitField0_;
28694     // optional bool prev_normalizer_value = 1;
28695     public static final int PREV_NORMALIZER_VALUE_FIELD_NUMBER = 1;
28696     private boolean prevNormalizerValue_;
28697     /**
28698      * <code>optional bool prev_normalizer_value = 1;</code>
28699      */
hasPrevNormalizerValue()28700     public boolean hasPrevNormalizerValue() {
28701       return ((bitField0_ & 0x00000001) == 0x00000001);
28702     }
28703     /**
28704      * <code>optional bool prev_normalizer_value = 1;</code>
28705      */
getPrevNormalizerValue()28706     public boolean getPrevNormalizerValue() {
28707       return prevNormalizerValue_;
28708     }
28709 
initFields()28710     private void initFields() {
28711       prevNormalizerValue_ = false;
28712     }
28713     private byte memoizedIsInitialized = -1;
isInitialized()28714     public final boolean isInitialized() {
28715       byte isInitialized = memoizedIsInitialized;
28716       if (isInitialized != -1) return isInitialized == 1;
28717 
28718       memoizedIsInitialized = 1;
28719       return true;
28720     }
28721 
writeTo(com.google.protobuf.CodedOutputStream output)28722     public void writeTo(com.google.protobuf.CodedOutputStream output)
28723                         throws java.io.IOException {
28724       getSerializedSize();
28725       if (((bitField0_ & 0x00000001) == 0x00000001)) {
28726         output.writeBool(1, prevNormalizerValue_);
28727       }
28728       getUnknownFields().writeTo(output);
28729     }
28730 
28731     private int memoizedSerializedSize = -1;
getSerializedSize()28732     public int getSerializedSize() {
28733       int size = memoizedSerializedSize;
28734       if (size != -1) return size;
28735 
28736       size = 0;
28737       if (((bitField0_ & 0x00000001) == 0x00000001)) {
28738         size += com.google.protobuf.CodedOutputStream
28739           .computeBoolSize(1, prevNormalizerValue_);
28740       }
28741       size += getUnknownFields().getSerializedSize();
28742       memoizedSerializedSize = size;
28743       return size;
28744     }
28745 
28746     private static final long serialVersionUID = 0L;
28747     @java.lang.Override
writeReplace()28748     protected java.lang.Object writeReplace()
28749         throws java.io.ObjectStreamException {
28750       return super.writeReplace();
28751     }
28752 
28753     @java.lang.Override
equals(final java.lang.Object obj)28754     public boolean equals(final java.lang.Object obj) {
28755       if (obj == this) {
28756        return true;
28757       }
28758       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)) {
28759         return super.equals(obj);
28760       }
28761       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) obj;
28762 
28763       boolean result = true;
28764       result = result && (hasPrevNormalizerValue() == other.hasPrevNormalizerValue());
28765       if (hasPrevNormalizerValue()) {
28766         result = result && (getPrevNormalizerValue()
28767             == other.getPrevNormalizerValue());
28768       }
28769       result = result &&
28770           getUnknownFields().equals(other.getUnknownFields());
28771       return result;
28772     }
28773 
28774     private int memoizedHashCode = 0;
28775     @java.lang.Override
hashCode()28776     public int hashCode() {
28777       if (memoizedHashCode != 0) {
28778         return memoizedHashCode;
28779       }
28780       int hash = 41;
28781       hash = (19 * hash) + getDescriptorForType().hashCode();
28782       if (hasPrevNormalizerValue()) {
28783         hash = (37 * hash) + PREV_NORMALIZER_VALUE_FIELD_NUMBER;
28784         hash = (53 * hash) + hashBoolean(getPrevNormalizerValue());
28785       }
28786       hash = (29 * hash) + getUnknownFields().hashCode();
28787       memoizedHashCode = hash;
28788       return hash;
28789     }
28790 
parseFrom( com.google.protobuf.ByteString data)28791     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(
28792         com.google.protobuf.ByteString data)
28793         throws com.google.protobuf.InvalidProtocolBufferException {
28794       return PARSER.parseFrom(data);
28795     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28796     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(
28797         com.google.protobuf.ByteString data,
28798         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28799         throws com.google.protobuf.InvalidProtocolBufferException {
28800       return PARSER.parseFrom(data, extensionRegistry);
28801     }
parseFrom(byte[] data)28802     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(byte[] data)
28803         throws com.google.protobuf.InvalidProtocolBufferException {
28804       return PARSER.parseFrom(data);
28805     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28806     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(
28807         byte[] data,
28808         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28809         throws com.google.protobuf.InvalidProtocolBufferException {
28810       return PARSER.parseFrom(data, extensionRegistry);
28811     }
parseFrom(java.io.InputStream input)28812     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(java.io.InputStream input)
28813         throws java.io.IOException {
28814       return PARSER.parseFrom(input);
28815     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28816     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(
28817         java.io.InputStream input,
28818         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28819         throws java.io.IOException {
28820       return PARSER.parseFrom(input, extensionRegistry);
28821     }
parseDelimitedFrom(java.io.InputStream input)28822     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(java.io.InputStream input)
28823         throws java.io.IOException {
28824       return PARSER.parseDelimitedFrom(input);
28825     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28826     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseDelimitedFrom(
28827         java.io.InputStream input,
28828         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28829         throws java.io.IOException {
28830       return PARSER.parseDelimitedFrom(input, extensionRegistry);
28831     }
parseFrom( com.google.protobuf.CodedInputStream input)28832     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(
28833         com.google.protobuf.CodedInputStream input)
28834         throws java.io.IOException {
28835       return PARSER.parseFrom(input);
28836     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28837     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parseFrom(
28838         com.google.protobuf.CodedInputStream input,
28839         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28840         throws java.io.IOException {
28841       return PARSER.parseFrom(input, extensionRegistry);
28842     }
28843 
newBuilder()28844     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()28845     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype)28846     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse prototype) {
28847       return newBuilder().mergeFrom(prototype);
28848     }
toBuilder()28849     public Builder toBuilder() { return newBuilder(this); }
28850 
28851     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)28852     protected Builder newBuilderForType(
28853         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28854       Builder builder = new Builder(parent);
28855       return builder;
28856     }
28857     /**
28858      * Protobuf type {@code SetNormalizerRunningResponse}
28859      */
28860     public static final class Builder extends
28861         com.google.protobuf.GeneratedMessage.Builder<Builder>
28862        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponseOrBuilder {
28863       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()28864           getDescriptor() {
28865         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningResponse_descriptor;
28866       }
28867 
28868       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()28869           internalGetFieldAccessorTable() {
28870         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningResponse_fieldAccessorTable
28871             .ensureFieldAccessorsInitialized(
28872                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.Builder.class);
28873       }
28874 
28875       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.newBuilder()
Builder()28876       private Builder() {
28877         maybeForceBuilderInitialization();
28878       }
28879 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)28880       private Builder(
28881           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28882         super(parent);
28883         maybeForceBuilderInitialization();
28884       }
maybeForceBuilderInitialization()28885       private void maybeForceBuilderInitialization() {
28886         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
28887         }
28888       }
create()28889       private static Builder create() {
28890         return new Builder();
28891       }
28892 
clear()28893       public Builder clear() {
28894         super.clear();
28895         prevNormalizerValue_ = false;
28896         bitField0_ = (bitField0_ & ~0x00000001);
28897         return this;
28898       }
28899 
clone()28900       public Builder clone() {
28901         return create().mergeFrom(buildPartial());
28902       }
28903 
28904       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()28905           getDescriptorForType() {
28906         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetNormalizerRunningResponse_descriptor;
28907       }
28908 
getDefaultInstanceForType()28909       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse getDefaultInstanceForType() {
28910         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
28911       }
28912 
build()28913       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse build() {
28914         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = buildPartial();
28915         if (!result.isInitialized()) {
28916           throw newUninitializedMessageException(result);
28917         }
28918         return result;
28919       }
28920 
buildPartial()28921       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse buildPartial() {
28922         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse(this);
28923         int from_bitField0_ = bitField0_;
28924         int to_bitField0_ = 0;
28925         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
28926           to_bitField0_ |= 0x00000001;
28927         }
28928         result.prevNormalizerValue_ = prevNormalizerValue_;
28929         result.bitField0_ = to_bitField0_;
28930         onBuilt();
28931         return result;
28932       }
28933 
mergeFrom(com.google.protobuf.Message other)28934       public Builder mergeFrom(com.google.protobuf.Message other) {
28935         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) {
28936           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse)other);
28937         } else {
28938           super.mergeFrom(other);
28939           return this;
28940         }
28941       }
28942 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other)28943       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse other) {
28944         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance()) return this;
28945         if (other.hasPrevNormalizerValue()) {
28946           setPrevNormalizerValue(other.getPrevNormalizerValue());
28947         }
28948         this.mergeUnknownFields(other.getUnknownFields());
28949         return this;
28950       }
28951 
isInitialized()28952       public final boolean isInitialized() {
28953         return true;
28954       }
28955 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)28956       public Builder mergeFrom(
28957           com.google.protobuf.CodedInputStream input,
28958           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28959           throws java.io.IOException {
28960         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse parsedMessage = null;
28961         try {
28962           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
28963         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28964           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) e.getUnfinishedMessage();
28965           throw e;
28966         } finally {
28967           if (parsedMessage != null) {
28968             mergeFrom(parsedMessage);
28969           }
28970         }
28971         return this;
28972       }
28973       private int bitField0_;
28974 
28975       // optional bool prev_normalizer_value = 1;
28976       private boolean prevNormalizerValue_ ;
28977       /**
28978        * <code>optional bool prev_normalizer_value = 1;</code>
28979        */
hasPrevNormalizerValue()28980       public boolean hasPrevNormalizerValue() {
28981         return ((bitField0_ & 0x00000001) == 0x00000001);
28982       }
28983       /**
28984        * <code>optional bool prev_normalizer_value = 1;</code>
28985        */
getPrevNormalizerValue()28986       public boolean getPrevNormalizerValue() {
28987         return prevNormalizerValue_;
28988       }
28989       /**
28990        * <code>optional bool prev_normalizer_value = 1;</code>
28991        */
setPrevNormalizerValue(boolean value)28992       public Builder setPrevNormalizerValue(boolean value) {
28993         bitField0_ |= 0x00000001;
28994         prevNormalizerValue_ = value;
28995         onChanged();
28996         return this;
28997       }
28998       /**
28999        * <code>optional bool prev_normalizer_value = 1;</code>
29000        */
clearPrevNormalizerValue()29001       public Builder clearPrevNormalizerValue() {
29002         bitField0_ = (bitField0_ & ~0x00000001);
29003         prevNormalizerValue_ = false;
29004         onChanged();
29005         return this;
29006       }
29007 
29008       // @@protoc_insertion_point(builder_scope:SetNormalizerRunningResponse)
29009     }
29010 
29011     static {
29012       defaultInstance = new SetNormalizerRunningResponse(true);
defaultInstance.initFields()29013       defaultInstance.initFields();
29014     }
29015 
29016     // @@protoc_insertion_point(class_scope:SetNormalizerRunningResponse)
29017   }
29018 
  /**
   * Marker accessor interface for the field-less {@code IsNormalizerEnabledRequest}
   * message; it declares no members beyond the base {@code MessageOrBuilder} contract.
   */
  public interface IsNormalizerEnabledRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
29022   /**
29023    * Protobuf type {@code IsNormalizerEnabledRequest}
29024    */
29025   public static final class IsNormalizerEnabledRequest extends
29026       com.google.protobuf.GeneratedMessage
29027       implements IsNormalizerEnabledRequestOrBuilder {
    // Use IsNormalizerEnabledRequest.newBuilder() to construct.
    // Copies the builder's unknown fields into this immutable instance.
    private IsNormalizerEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only to create the shared default instance.
    private IsNormalizerEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
29034 
29035     private static final IsNormalizerEnabledRequest defaultInstance;
    /** Returns the shared immutable default instance of this message. */
    public static IsNormalizerEnabledRequest getDefaultInstance() {
      return defaultInstance;
    }
29039 
    /** Instance-level accessor for the shared default instance. */
    public IsNormalizerEnabledRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
29043 
29044     private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      // All parsed content ends up here: this message declares no fields.
      return this.unknownFields;
    }
IsNormalizerEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29050     private IsNormalizerEnabledRequest(
29051         com.google.protobuf.CodedInputStream input,
29052         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29053         throws com.google.protobuf.InvalidProtocolBufferException {
29054       initFields();
29055       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
29056           com.google.protobuf.UnknownFieldSet.newBuilder();
29057       try {
29058         boolean done = false;
29059         while (!done) {
29060           int tag = input.readTag();
29061           switch (tag) {
29062             case 0:
29063               done = true;
29064               break;
29065             default: {
29066               if (!parseUnknownField(input, unknownFields,
29067                                      extensionRegistry, tag)) {
29068                 done = true;
29069               }
29070               break;
29071             }
29072           }
29073         }
29074       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
29075         throw e.setUnfinishedMessage(this);
29076       } catch (java.io.IOException e) {
29077         throw new com.google.protobuf.InvalidProtocolBufferException(
29078             e.getMessage()).setUnfinishedMessage(this);
29079       } finally {
29080         this.unknownFields = unknownFields.build();
29081         makeExtensionsImmutable();
29082       }
29083     }
29084     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()29085         getDescriptor() {
29086       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledRequest_descriptor;
29087     }
29088 
29089     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()29090         internalGetFieldAccessorTable() {
29091       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledRequest_fieldAccessorTable
29092           .ensureFieldAccessorsInitialized(
29093               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class);
29094     }
29095 
29096     public static com.google.protobuf.Parser<IsNormalizerEnabledRequest> PARSER =
29097         new com.google.protobuf.AbstractParser<IsNormalizerEnabledRequest>() {
29098       public IsNormalizerEnabledRequest parsePartialFrom(
29099           com.google.protobuf.CodedInputStream input,
29100           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29101           throws com.google.protobuf.InvalidProtocolBufferException {
29102         return new IsNormalizerEnabledRequest(input, extensionRegistry);
29103       }
29104     };
29105 
29106     @java.lang.Override
getParserForType()29107     public com.google.protobuf.Parser<IsNormalizerEnabledRequest> getParserForType() {
29108       return PARSER;
29109     }
29110 
initFields()29111     private void initFields() {
29112     }
29113     private byte memoizedIsInitialized = -1;
isInitialized()29114     public final boolean isInitialized() {
29115       byte isInitialized = memoizedIsInitialized;
29116       if (isInitialized != -1) return isInitialized == 1;
29117 
29118       memoizedIsInitialized = 1;
29119       return true;
29120     }
29121 
writeTo(com.google.protobuf.CodedOutputStream output)29122     public void writeTo(com.google.protobuf.CodedOutputStream output)
29123                         throws java.io.IOException {
29124       getSerializedSize();
29125       getUnknownFields().writeTo(output);
29126     }
29127 
29128     private int memoizedSerializedSize = -1;
getSerializedSize()29129     public int getSerializedSize() {
29130       int size = memoizedSerializedSize;
29131       if (size != -1) return size;
29132 
29133       size = 0;
29134       size += getUnknownFields().getSerializedSize();
29135       memoizedSerializedSize = size;
29136       return size;
29137     }
29138 
29139     private static final long serialVersionUID = 0L;
29140     @java.lang.Override
writeReplace()29141     protected java.lang.Object writeReplace()
29142         throws java.io.ObjectStreamException {
29143       return super.writeReplace();
29144     }
29145 
29146     @java.lang.Override
equals(final java.lang.Object obj)29147     public boolean equals(final java.lang.Object obj) {
29148       if (obj == this) {
29149        return true;
29150       }
29151       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)) {
29152         return super.equals(obj);
29153       }
29154       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) obj;
29155 
29156       boolean result = true;
29157       result = result &&
29158           getUnknownFields().equals(other.getUnknownFields());
29159       return result;
29160     }
29161 
29162     private int memoizedHashCode = 0;
29163     @java.lang.Override
hashCode()29164     public int hashCode() {
29165       if (memoizedHashCode != 0) {
29166         return memoizedHashCode;
29167       }
29168       int hash = 41;
29169       hash = (19 * hash) + getDescriptorForType().hashCode();
29170       hash = (29 * hash) + getUnknownFields().hashCode();
29171       memoizedHashCode = hash;
29172       return hash;
29173     }
29174 
parseFrom( com.google.protobuf.ByteString data)29175     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
29176         com.google.protobuf.ByteString data)
29177         throws com.google.protobuf.InvalidProtocolBufferException {
29178       return PARSER.parseFrom(data);
29179     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29180     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
29181         com.google.protobuf.ByteString data,
29182         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29183         throws com.google.protobuf.InvalidProtocolBufferException {
29184       return PARSER.parseFrom(data, extensionRegistry);
29185     }
parseFrom(byte[] data)29186     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(byte[] data)
29187         throws com.google.protobuf.InvalidProtocolBufferException {
29188       return PARSER.parseFrom(data);
29189     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29190     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
29191         byte[] data,
29192         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29193         throws com.google.protobuf.InvalidProtocolBufferException {
29194       return PARSER.parseFrom(data, extensionRegistry);
29195     }
parseFrom(java.io.InputStream input)29196     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(java.io.InputStream input)
29197         throws java.io.IOException {
29198       return PARSER.parseFrom(input);
29199     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29200     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
29201         java.io.InputStream input,
29202         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29203         throws java.io.IOException {
29204       return PARSER.parseFrom(input, extensionRegistry);
29205     }
parseDelimitedFrom(java.io.InputStream input)29206     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(java.io.InputStream input)
29207         throws java.io.IOException {
29208       return PARSER.parseDelimitedFrom(input);
29209     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29210     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseDelimitedFrom(
29211         java.io.InputStream input,
29212         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29213         throws java.io.IOException {
29214       return PARSER.parseDelimitedFrom(input, extensionRegistry);
29215     }
parseFrom( com.google.protobuf.CodedInputStream input)29216     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
29217         com.google.protobuf.CodedInputStream input)
29218         throws java.io.IOException {
29219       return PARSER.parseFrom(input);
29220     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29221     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parseFrom(
29222         com.google.protobuf.CodedInputStream input,
29223         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29224         throws java.io.IOException {
29225       return PARSER.parseFrom(input, extensionRegistry);
29226     }
29227 
newBuilder()29228     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()29229     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype)29230     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest prototype) {
29231       return newBuilder().mergeFrom(prototype);
29232     }
toBuilder()29233     public Builder toBuilder() { return newBuilder(this); }
29234 
29235     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)29236     protected Builder newBuilderForType(
29237         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
29238       Builder builder = new Builder(parent);
29239       return builder;
29240     }
29241     /**
29242      * Protobuf type {@code IsNormalizerEnabledRequest}
29243      */
29244     public static final class Builder extends
29245         com.google.protobuf.GeneratedMessage.Builder<Builder>
29246        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequestOrBuilder {
29247       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()29248           getDescriptor() {
29249         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledRequest_descriptor;
29250       }
29251 
29252       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()29253           internalGetFieldAccessorTable() {
29254         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledRequest_fieldAccessorTable
29255             .ensureFieldAccessorsInitialized(
29256                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.Builder.class);
29257       }
29258 
29259       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.newBuilder()
Builder()29260       private Builder() {
29261         maybeForceBuilderInitialization();
29262       }
29263 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)29264       private Builder(
29265           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
29266         super(parent);
29267         maybeForceBuilderInitialization();
29268       }
maybeForceBuilderInitialization()29269       private void maybeForceBuilderInitialization() {
29270         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
29271         }
29272       }
create()29273       private static Builder create() {
29274         return new Builder();
29275       }
29276 
clear()29277       public Builder clear() {
29278         super.clear();
29279         return this;
29280       }
29281 
clone()29282       public Builder clone() {
29283         return create().mergeFrom(buildPartial());
29284       }
29285 
29286       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()29287           getDescriptorForType() {
29288         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledRequest_descriptor;
29289       }
29290 
getDefaultInstanceForType()29291       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest getDefaultInstanceForType() {
29292         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
29293       }
29294 
build()29295       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest build() {
29296         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = buildPartial();
29297         if (!result.isInitialized()) {
29298           throw newUninitializedMessageException(result);
29299         }
29300         return result;
29301       }
29302 
buildPartial()29303       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest buildPartial() {
29304         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest(this);
29305         onBuilt();
29306         return result;
29307       }
29308 
mergeFrom(com.google.protobuf.Message other)29309       public Builder mergeFrom(com.google.protobuf.Message other) {
29310         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) {
29311           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)other);
29312         } else {
29313           super.mergeFrom(other);
29314           return this;
29315         }
29316       }
29317 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other)29318       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest other) {
29319         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance()) return this;
29320         this.mergeUnknownFields(other.getUnknownFields());
29321         return this;
29322       }
29323 
isInitialized()29324       public final boolean isInitialized() {
29325         return true;
29326       }
29327 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29328       public Builder mergeFrom(
29329           com.google.protobuf.CodedInputStream input,
29330           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29331           throws java.io.IOException {
29332         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest parsedMessage = null;
29333         try {
29334           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
29335         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
29336           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest) e.getUnfinishedMessage();
29337           throw e;
29338         } finally {
29339           if (parsedMessage != null) {
29340             mergeFrom(parsedMessage);
29341           }
29342         }
29343         return this;
29344       }
29345 
29346       // @@protoc_insertion_point(builder_scope:IsNormalizerEnabledRequest)
29347     }
29348 
29349     static {
29350       defaultInstance = new IsNormalizerEnabledRequest(true);
defaultInstance.initFields()29351       defaultInstance.initFields();
29352     }
29353 
29354     // @@protoc_insertion_point(class_scope:IsNormalizerEnabledRequest)
29355   }
29356 
  /**
   * Read-side interface for {@code IsNormalizerEnabledResponse}: presence
   * check and getter for the single required bool field {@code enabled}.
   */
  public interface IsNormalizerEnabledResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool enabled = 1;
    /**
     * <code>required bool enabled = 1;</code>
     */
    boolean hasEnabled();
    /**
     * <code>required bool enabled = 1;</code>
     */
    boolean getEnabled();
  }
29370   /**
29371    * Protobuf type {@code IsNormalizerEnabledResponse}
29372    */
29373   public static final class IsNormalizerEnabledResponse extends
29374       com.google.protobuf.GeneratedMessage
29375       implements IsNormalizerEnabledResponseOrBuilder {
    // Use IsNormalizerEnabledResponse.newBuilder() to construct.
    private IsNormalizerEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only by the static initializer to create the
    // singleton default instance without running the wire parser.
    private IsNormalizerEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
29382 
    // Singleton default instance, assigned in the class's static initializer.
    private static final IsNormalizerEnabledResponse defaultInstance;
    public static IsNormalizerEnabledResponse getDefaultInstance() {
      return defaultInstance;
    }

    public IsNormalizerEnabledResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
29391 
    // Fields that arrived on the wire but are not declared in the schema;
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: reads tags until EOF (tag 0), stores field 1
    // (bool "enabled", varint tag 8), and routes anything else to
    // parseUnknownField.
    private IsNormalizerEnabledResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // NOTE: the default arm precedes case 8; Java switch dispatch is
            // unaffected by the textual order of labels, so tag 8 still
            // reaches its own case.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // required bool enabled = 1 — set the has-bit and the value.
              bitField0_ |= 0x00000001;
              enabled_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always record whatever unknown fields were read, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor and reflection-accessor plumbing for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.Builder.class);
    }
29449 
    // Stateless parser shared by all static parseFrom overloads; delegates
    // to the wire-parsing constructor above.
    public static com.google.protobuf.Parser<IsNormalizerEnabledResponse> PARSER =
        new com.google.protobuf.AbstractParser<IsNormalizerEnabledResponse>() {
      public IsNormalizerEnabledResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsNormalizerEnabledResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsNormalizerEnabledResponse> getParserForType() {
      return PARSER;
    }
29464 
    // Presence bits for optional/required fields; bit 0 tracks "enabled".
    private int bitField0_;
    // required bool enabled = 1;
    public static final int ENABLED_FIELD_NUMBER = 1;
    private boolean enabled_;
    /**
     * <code>required bool enabled = 1;</code>
     */
    public boolean hasEnabled() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool enabled = 1;</code>
     */
    public boolean getEnabled() {
      return enabled_;
    }
29481 
    // Apply proto2 field defaults.
    private void initFields() {
      enabled_ = false;
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // "enabled" is required: missing presence makes the message invalid.
      if (!hasEnabled()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
29497 
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Populate the size cache before writing (standard generated pattern).
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, enabled_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, enabled_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
29521 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's replacement form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
29528 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) obj;

      // Equal iff both agree on presence of "enabled", on its value when
      // present, and on their unknown-field sets.
      boolean result = true;
      result = result && (hasEnabled() == other.hasEnabled());
      if (hasEnabled()) {
        result = result && (getEnabled()
            == other.getEnabled());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
29549 
    // Memoized hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      // Mix in "enabled" only when present, mirroring equals().
      if (hasEnabled()) {
        hash = (37 * hash) + ENABLED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getEnabled());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
29566 
    // Convenience parse entry points, all delegating to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
29619 
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
29633     /**
29634      * Protobuf type {@code IsNormalizerEnabledResponse}
29635      */
29636     public static final class Builder extends
29637         com.google.protobuf.GeneratedMessage.Builder<Builder>
29638        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponseOrBuilder {
      // Descriptor and reflection-accessor plumbing for the builder.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.Builder.class);
      }
29650 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Only a scalar field here, so nothing to force even when
        // alwaysUseFieldBuilders is set.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
29668 
clear()29669       public Builder clear() {
29670         super.clear();
29671         enabled_ = false;
29672         bitField0_ = (bitField0_ & ~0x00000001);
29673         return this;
29674       }
29675 
clone()29676       public Builder clone() {
29677         return create().mergeFrom(buildPartial());
29678       }
29679 
29680       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()29681           getDescriptorForType() {
29682         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsNormalizerEnabledResponse_descriptor;
29683       }
29684 
getDefaultInstanceForType()29685       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse getDefaultInstanceForType() {
29686         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
29687       }
29688 
build()29689       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse build() {
29690         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse result = buildPartial();
29691         if (!result.isInitialized()) {
29692           throw newUninitializedMessageException(result);
29693         }
29694         return result;
29695       }
29696 
buildPartial()29697       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse buildPartial() {
29698         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse(this);
29699         int from_bitField0_ = bitField0_;
29700         int to_bitField0_ = 0;
29701         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
29702           to_bitField0_ |= 0x00000001;
29703         }
29704         result.enabled_ = enabled_;
29705         result.bitField0_ = to_bitField0_;
29706         onBuilt();
29707         return result;
29708       }
29709 
mergeFrom(com.google.protobuf.Message other)29710       public Builder mergeFrom(com.google.protobuf.Message other) {
29711         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) {
29712           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse)other);
29713         } else {
29714           super.mergeFrom(other);
29715           return this;
29716         }
29717       }
29718 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse other)29719       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse other) {
29720         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance()) return this;
29721         if (other.hasEnabled()) {
29722           setEnabled(other.getEnabled());
29723         }
29724         this.mergeUnknownFields(other.getUnknownFields());
29725         return this;
29726       }
29727 
isInitialized()29728       public final boolean isInitialized() {
29729         if (!hasEnabled()) {
29730 
29731           return false;
29732         }
29733         return true;
29734       }
29735 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29736       public Builder mergeFrom(
29737           com.google.protobuf.CodedInputStream input,
29738           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29739           throws java.io.IOException {
29740         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse parsedMessage = null;
29741         try {
29742           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
29743         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
29744           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) e.getUnfinishedMessage();
29745           throw e;
29746         } finally {
29747           if (parsedMessage != null) {
29748             mergeFrom(parsedMessage);
29749           }
29750         }
29751         return this;
29752       }
29753       private int bitField0_;
29754 
29755       // required bool enabled = 1;
29756       private boolean enabled_ ;
29757       /**
29758        * <code>required bool enabled = 1;</code>
29759        */
hasEnabled()29760       public boolean hasEnabled() {
29761         return ((bitField0_ & 0x00000001) == 0x00000001);
29762       }
29763       /**
29764        * <code>required bool enabled = 1;</code>
29765        */
getEnabled()29766       public boolean getEnabled() {
29767         return enabled_;
29768       }
29769       /**
29770        * <code>required bool enabled = 1;</code>
29771        */
setEnabled(boolean value)29772       public Builder setEnabled(boolean value) {
29773         bitField0_ |= 0x00000001;
29774         enabled_ = value;
29775         onChanged();
29776         return this;
29777       }
29778       /**
29779        * <code>required bool enabled = 1;</code>
29780        */
clearEnabled()29781       public Builder clearEnabled() {
29782         bitField0_ = (bitField0_ & ~0x00000001);
29783         enabled_ = false;
29784         onChanged();
29785         return this;
29786       }
29787 
29788       // @@protoc_insertion_point(builder_scope:IsNormalizerEnabledResponse)
29789     }
29790 
    static {
      // Eagerly create the shared default instance via the lightweight
      // no-parse constructor, then reset its fields to proto defaults.
      defaultInstance = new IsNormalizerEnabledResponse(true);
      defaultInstance.initFields();
    }
29795 
29796     // @@protoc_insertion_point(class_scope:IsNormalizerEnabledResponse)
29797   }
29798 
  /**
   * Read-only accessor interface for {@code RunCatalogScanRequest}.
   * The message declares no fields, so no accessors are present.
   */
  public interface RunCatalogScanRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code RunCatalogScanRequest}
   *
   * <p>This request message declares no fields; only unknown fields read from
   * the wire are retained, and it is always considered initialized.
   */
  public static final class RunCatalogScanRequest extends
      com.google.protobuf.GeneratedMessage
      implements RunCatalogScanRequestOrBuilder {
    // Use RunCatalogScanRequest.newBuilder() to construct.
    private RunCatalogScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used only for the singleton default instance.
    private RunCatalogScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final RunCatalogScanRequest defaultInstance;
    public static RunCatalogScanRequest getDefaultInstance() {
      return defaultInstance;
    }

    public RunCatalogScanRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Since the message has no declared
    // fields, every non-terminator tag is preserved as an unknown field.
    private RunCatalogScanRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of message/stream.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.Builder.class);
    }

    // Stateless parser singleton; delegates to the parsing constructor.
    public static com.google.protobuf.Parser<RunCatalogScanRequest> PARSER =
        new com.google.protobuf.AbstractParser<RunCatalogScanRequest>() {
      public RunCatalogScanRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RunCatalogScanRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RunCatalogScanRequest> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    // No required fields exist, so initialization trivially succeeds;
    // the result is memoized in memoizedIsInitialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    // Serialized size is just the unknown-field size; cached after first call.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // With no declared fields, equality reduces to unknown-field equality.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers, all delegating to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code RunCatalogScanRequest}
     *
     * <p>Builder for the field-less request; only unknown fields are carried.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields; empty hook kept for structural uniformity.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Only unknown fields can be merged, as the message has no declared fields.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses via PARSER; on failure, merges any partially parsed message
      // before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:RunCatalogScanRequest)
    }

    static {
      // Eagerly create the shared default instance and reset its fields.
      defaultInstance = new RunCatalogScanRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:RunCatalogScanRequest)
  }
30136 
  /**
   * Read-only accessor interface shared by {@code RunCatalogScanResponse}
   * and its builder; exposes the optional {@code scan_result} field.
   */
  public interface RunCatalogScanResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int32 scan_result = 1;
    /**
     * <code>optional int32 scan_result = 1;</code>
     */
    boolean hasScanResult();
    /**
     * <code>optional int32 scan_result = 1;</code>
     */
    int getScanResult();
  }
30150   /**
30151    * Protobuf type {@code RunCatalogScanResponse}
30152    */
30153   public static final class RunCatalogScanResponse extends
30154       com.google.protobuf.GeneratedMessage
30155       implements RunCatalogScanResponseOrBuilder {
    // Use RunCatalogScanResponse.newBuilder() to construct.
    private RunCatalogScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used only for the singleton default instance.
    private RunCatalogScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
30162 
    // Singleton default instance, created in the static initializer below.
    private static final RunCatalogScanResponse defaultInstance;
    public static RunCatalogScanResponse getDefaultInstance() {
      return defaultInstance;
    }

    public RunCatalogScanResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
30171 
    // Fields read from the wire that this message type does not declare.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
RunCatalogScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)30178     private RunCatalogScanResponse(
30179         com.google.protobuf.CodedInputStream input,
30180         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30181         throws com.google.protobuf.InvalidProtocolBufferException {
30182       initFields();
30183       int mutable_bitField0_ = 0;
30184       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
30185           com.google.protobuf.UnknownFieldSet.newBuilder();
30186       try {
30187         boolean done = false;
30188         while (!done) {
30189           int tag = input.readTag();
30190           switch (tag) {
30191             case 0:
30192               done = true;
30193               break;
30194             default: {
30195               if (!parseUnknownField(input, unknownFields,
30196                                      extensionRegistry, tag)) {
30197                 done = true;
30198               }
30199               break;
30200             }
30201             case 8: {
30202               bitField0_ |= 0x00000001;
30203               scanResult_ = input.readInt32();
30204               break;
30205             }
30206           }
30207         }
30208       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
30209         throw e.setUnfinishedMessage(this);
30210       } catch (java.io.IOException e) {
30211         throw new com.google.protobuf.InvalidProtocolBufferException(
30212             e.getMessage()).setUnfinishedMessage(this);
30213       } finally {
30214         this.unknownFields = unknownFields.build();
30215         makeExtensionsImmutable();
30216       }
30217     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_descriptor;
    }

    // Hooks this class into GeneratedMessage's reflection machinery.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.Builder.class);
    }
30229 
30230     public static com.google.protobuf.Parser<RunCatalogScanResponse> PARSER =
30231         new com.google.protobuf.AbstractParser<RunCatalogScanResponse>() {
30232       public RunCatalogScanResponse parsePartialFrom(
30233           com.google.protobuf.CodedInputStream input,
30234           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30235           throws com.google.protobuf.InvalidProtocolBufferException {
30236         return new RunCatalogScanResponse(input, extensionRegistry);
30237       }
30238     };
30239 
30240     @java.lang.Override
getParserForType()30241     public com.google.protobuf.Parser<RunCatalogScanResponse> getParserForType() {
30242       return PARSER;
30243     }
30244 
    // Bit 0 records whether scan_result was present on the wire.
    private int bitField0_;
    // optional int32 scan_result = 1;
    public static final int SCAN_RESULT_FIELD_NUMBER = 1;
    private int scanResult_;
    /**
     * <code>optional int32 scan_result = 1;</code>
     */
    public boolean hasScanResult() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int32 scan_result = 1;</code>
     */
    public int getScanResult() {
      return scanResult_;
    }
30261 
    // Resets scan_result to its proto default (0).
    private void initFields() {
      scanResult_ = 0;
    }
    private byte memoizedIsInitialized = -1;
    // No required fields, so initialization trivially succeeds; memoized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
30273 
    // Writes scan_result (only if present) followed by any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, scanResult_);
      }
      getUnknownFields().writeTo(output);
    }
30282 
    private int memoizedSerializedSize = -1;
    // Size of scan_result (if present) plus unknown fields; cached after
    // the first computation.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, scanResult_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
30297 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
30304 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) obj;

      // Value equality: same presence and value of scan_result, plus equal
      // unknown field sets.
      boolean result = true;
      result = result && (hasScanResult() == other.hasScanResult());
      if (hasScanResult()) {
        result = result && (getScanResult()
            == other.getScanResult());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasScanResult()) {
        hash = (37 * hash) + SCAN_RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getScanResult();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
30342 
    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
30395 
    // Builder factory methods: fresh builder, builder pre-seeded from a
    // prototype, and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent for change
    // notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
30409     /**
30410      * Protobuf type {@code RunCatalogScanResponse}
30411      */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so there are no nested builders to
      // force-initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets scan_result to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        scanResult_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RunCatalogScanResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into an immutable message without checking
      // required fields (this message has none).
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.scanResult_ = scanResult_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance()) return this;
        if (other.hasScanResult()) {
          setScanResult(other.getScanResult());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream and merges the result; on parse failure the
      // partially-parsed message (if any) is still merged in the finally block
      // before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 tracks presence of scan_result.
      private int bitField0_;

      // optional int32 scan_result = 1;
      private int scanResult_ ;
      /**
       * <code>optional int32 scan_result = 1;</code>
       */
      public boolean hasScanResult() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional int32 scan_result = 1;</code>
       */
      public int getScanResult() {
        return scanResult_;
      }
      /**
       * <code>optional int32 scan_result = 1;</code>
       */
      public Builder setScanResult(int value) {
        bitField0_ |= 0x00000001;
        scanResult_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 scan_result = 1;</code>
       */
      public Builder clearScanResult() {
        bitField0_ = (bitField0_ & ~0x00000001);
        scanResult_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:RunCatalogScanResponse)
    }
30562 
    static {
      // Eagerly build and reset the shared immutable default instance.
      defaultInstance = new RunCatalogScanResponse(true);
      defaultInstance.initFields();
    }
30567 
30568     // @@protoc_insertion_point(class_scope:RunCatalogScanResponse)
30569   }
30570 
  // Read-only accessor view implemented by both EnableCatalogJanitorRequest
  // and its Builder.
  public interface EnableCatalogJanitorRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool enable = 1;
    /**
     * <code>required bool enable = 1;</code>
     */
    boolean hasEnable();
    /**
     * <code>required bool enable = 1;</code>
     */
    boolean getEnable();
  }
30584   /**
30585    * Protobuf type {@code EnableCatalogJanitorRequest}
30586    */
30587   public static final class EnableCatalogJanitorRequest extends
30588       com.google.protobuf.GeneratedMessage
30589       implements EnableCatalogJanitorRequestOrBuilder {
30590     // Use EnableCatalogJanitorRequest.newBuilder() to construct.
    // Copies the builder's unknown fields into the immutable message.
    private EnableCatalogJanitorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance (noInit is a marker).
    private EnableCatalogJanitorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final EnableCatalogJanitorRequest defaultInstance;
    public static EnableCatalogJanitorRequest getDefaultInstance() {
      return defaultInstance;
    }

    public EnableCatalogJanitorRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not defined in the .proto schema, preserved through round-trips.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0); unrecognized tags are preserved in unknownFields.
    private EnableCatalogJanitorRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the 'default' arm precedes 'case 8' lexically; switch
          // dispatch is by value, so ordering does not affect behavior.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Tag 8 = field 1 (enable), wire type 0 (varint/bool).
              bitField0_ |= 0x00000001;
              enable_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always runs, so unknownFields is set even on a parse error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection/descriptor plumbing plus the stream parser singleton.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<EnableCatalogJanitorRequest> PARSER =
        new com.google.protobuf.AbstractParser<EnableCatalogJanitorRequest>() {
      public EnableCatalogJanitorRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EnableCatalogJanitorRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EnableCatalogJanitorRequest> getParserForType() {
      return PARSER;
    }
30678 
    // Bit 0 of bitField0_ records presence of the required 'enable' field.
    private int bitField0_;
    // required bool enable = 1;
    public static final int ENABLE_FIELD_NUMBER = 1;
    private boolean enable_;
    /**
     * <code>required bool enable = 1;</code>
     */
    public boolean hasEnable() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool enable = 1;</code>
     */
    public boolean getEnable() {
      return enable_;
    }

    // Resets fields to their proto defaults; called by the constructors.
    private void initFields() {
      enable_ = false;
    }
    // Cached result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'enable' is a required field; the message is invalid without it.
      if (!hasEnable()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
30711 
    // Serializes 'enable' (field 1) only when present (bit 0 of bitField0_),
    // then any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, enable_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized byte size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, enable_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the GeneratedMessage base class.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
30742 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) obj;

      // Value equality: same presence and value of 'enable', plus equal
      // unknown field sets.
      boolean result = true;
      result = result && (hasEnable() == other.hasEnable());
      if (hasEnable()) {
        result = result && (getEnable()
            == other.getEnable());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasEnable()) {
        hash = (37 * hash) + ENABLE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getEnable());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
30780 
    // Standard generated parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
30833 
    // Builder factory methods: fresh builder, builder pre-seeded from a
    // prototype, and builder seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent for change
    // notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
30847     /**
30848      * Protobuf type {@code EnableCatalogJanitorRequest}
30849      */
30850     public static final class Builder extends
30851         com.google.protobuf.GeneratedMessage.Builder<Builder>
30852        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequestOrBuilder {
      // Descriptor plumbing and construction hooks for the Builder.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so there are no nested builders to
      // force-initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
30882 
      // Resets 'enable' to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        enable_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
      }

      // Builds and verifies required fields, throwing if 'enable' is unset.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder state into an immutable message without checking
      // required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.enable_ = enable_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
30923 
mergeFrom(com.google.protobuf.Message other)30924       public Builder mergeFrom(com.google.protobuf.Message other) {
30925         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) {
30926           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)other);
30927         } else {
30928           super.mergeFrom(other);
30929           return this;
30930         }
30931       }
30932 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest other)30933       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest other) {
30934         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance()) return this;
30935         if (other.hasEnable()) {
30936           setEnable(other.getEnable());
30937         }
30938         this.mergeUnknownFields(other.getUnknownFields());
30939         return this;
30940       }
30941 
isInitialized()30942       public final boolean isInitialized() {
30943         if (!hasEnable()) {
30944 
30945           return false;
30946         }
30947         return true;
30948       }
30949 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)30950       public Builder mergeFrom(
30951           com.google.protobuf.CodedInputStream input,
30952           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30953           throws java.io.IOException {
30954         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest parsedMessage = null;
30955         try {
30956           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
30957         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
30958           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest) e.getUnfinishedMessage();
30959           throw e;
30960         } finally {
30961           if (parsedMessage != null) {
30962             mergeFrom(parsedMessage);
30963           }
30964         }
30965         return this;
30966       }
      // Has-bits for this builder's fields; bit 0 tracks whether 'enable' was set.
      private int bitField0_;

      // required bool enable = 1;
      private boolean enable_ ;
      /**
       * <code>required bool enable = 1;</code>
       */
      public boolean hasEnable() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool enable = 1;</code>
       */
      public boolean getEnable() {
        return enable_;
      }
      /**
       * <code>required bool enable = 1;</code>
       */
      public Builder setEnable(boolean value) {
        bitField0_ |= 0x00000001;
        enable_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool enable = 1;</code>
       */
      public Builder clearEnable() {
        bitField0_ = (bitField0_ & ~0x00000001);
        enable_ = false;
        onChanged();
        return this;
      }
31001 
31002       // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorRequest)
31003     }
31004 
    // Eagerly creates the singleton default instance via the no-parse constructor.
    static {
      defaultInstance = new EnableCatalogJanitorRequest(true);
      defaultInstance.initFields();
    }
31009 
31010     // @@protoc_insertion_point(class_scope:EnableCatalogJanitorRequest)
31011   }
31012 
  // Read-only accessor interface shared by EnableCatalogJanitorResponse and its Builder.
  public interface EnableCatalogJanitorResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool prev_value = 1;
    /**
     * <code>optional bool prev_value = 1;</code>
     */
    boolean hasPrevValue();
    /**
     * <code>optional bool prev_value = 1;</code>
     */
    boolean getPrevValue();
  }
31026   /**
31027    * Protobuf type {@code EnableCatalogJanitorResponse}
31028    */
  public static final class EnableCatalogJanitorResponse extends
      com.google.protobuf.GeneratedMessage
      implements EnableCatalogJanitorResponseOrBuilder {
    // Use EnableCatalogJanitorResponse.newBuilder() to construct.
    private EnableCatalogJanitorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-parse constructor used only for the singleton default instance.
    private EnableCatalogJanitorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final EnableCatalogJanitorResponse defaultInstance;
    public static EnableCatalogJanitorResponse getDefaultInstance() {
      return defaultInstance;
    }

    public EnableCatalogJanitorResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until end of message (tag 0);
    // unrecognized fields are preserved in unknownFields rather than dropped.
    private EnableCatalogJanitorResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: 'default' appearing before 'case 8' is legal Java; label order
          // does not affect switch dispatch.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Tag 8 = field 1 (prev_value), wire type 0 (varint/bool).
              bitField0_ |= 0x00000001;
              prevValue_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.Builder.class);
    }

    // NOTE(review): public mutable static field is a quirk of this protobuf
    // generator version; left as-is since this file is generated.
    public static com.google.protobuf.Parser<EnableCatalogJanitorResponse> PARSER =
        new com.google.protobuf.AbstractParser<EnableCatalogJanitorResponse>() {
      public EnableCatalogJanitorResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EnableCatalogJanitorResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EnableCatalogJanitorResponse> getParserForType() {
      return PARSER;
    }

    // Has-bits: bit 0 tracks presence of the optional 'prev_value' field.
    private int bitField0_;
    // optional bool prev_value = 1;
    public static final int PREV_VALUE_FIELD_NUMBER = 1;
    private boolean prevValue_;
    /**
     * <code>optional bool prev_value = 1;</code>
     */
    public boolean hasPrevValue() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool prev_value = 1;</code>
     */
    public boolean getPrevValue() {
      return prevValue_;
    }

    private void initFields() {
      prevValue_ = false;
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields in this message, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of caching the serialized size.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, prevValue_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, prevValue_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) obj;

      // Equal iff field presence, field value, and unknown fields all match.
      boolean result = true;
      result = result && (hasPrevValue() == other.hasPrevValue());
      if (hasPrevValue()) {
        result = result && (getPrevValue()
            == other.getPrevValue());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      // Memoized; safe because the message is immutable after construction.
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasPrevValue()) {
        hash = (37 * hash) + PREV_VALUE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getPrevValue());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code EnableCatalogJanitorResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No nested-message fields, so nothing to force-initialize here.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        prevValue_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_EnableCatalogJanitorResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state (value + has-bit) into a new message without
      // checking initialization.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.prevValue_ = prevValue_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance()) return this;
        if (other.hasPrevValue()) {
          setPrevValue(other.getPrevValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // prev_value is optional, so the builder is always initialized.
        return true;
      }

      // Parse-and-merge; on failure the partially-parsed message is still
      // merged in the finally block before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional bool prev_value = 1;
      private boolean prevValue_ ;
      /**
       * <code>optional bool prev_value = 1;</code>
       */
      public boolean hasPrevValue() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bool prev_value = 1;</code>
       */
      public boolean getPrevValue() {
        return prevValue_;
      }
      /**
       * <code>optional bool prev_value = 1;</code>
       */
      public Builder setPrevValue(boolean value) {
        bitField0_ |= 0x00000001;
        prevValue_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool prev_value = 1;</code>
       */
      public Builder clearPrevValue() {
        bitField0_ = (bitField0_ & ~0x00000001);
        prevValue_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:EnableCatalogJanitorResponse)
    }

    // Eagerly creates the singleton default instance via the no-parse constructor.
    static {
      defaultInstance = new EnableCatalogJanitorResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:EnableCatalogJanitorResponse)
  }
31446 
  // Accessor interface for IsCatalogJanitorEnabledRequest; the message has no fields.
  public interface IsCatalogJanitorEnabledRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
31450   /**
31451    * Protobuf type {@code IsCatalogJanitorEnabledRequest}
31452    */
31453   public static final class IsCatalogJanitorEnabledRequest extends
31454       com.google.protobuf.GeneratedMessage
31455       implements IsCatalogJanitorEnabledRequestOrBuilder {
31456     // Use IsCatalogJanitorEnabledRequest.newBuilder() to construct.
IsCatalogJanitorEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)31457     private IsCatalogJanitorEnabledRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
31458       super(builder);
31459       this.unknownFields = builder.getUnknownFields();
31460     }
IsCatalogJanitorEnabledRequest(boolean noInit)31461     private IsCatalogJanitorEnabledRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
31462 
31463     private static final IsCatalogJanitorEnabledRequest defaultInstance;
getDefaultInstance()31464     public static IsCatalogJanitorEnabledRequest getDefaultInstance() {
31465       return defaultInstance;
31466     }
31467 
getDefaultInstanceForType()31468     public IsCatalogJanitorEnabledRequest getDefaultInstanceForType() {
31469       return defaultInstance;
31470     }
31471 
31472     private final com.google.protobuf.UnknownFieldSet unknownFields;
31473     @java.lang.Override
31474     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()31475         getUnknownFields() {
31476       return this.unknownFields;
31477     }
IsCatalogJanitorEnabledRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)31478     private IsCatalogJanitorEnabledRequest(
31479         com.google.protobuf.CodedInputStream input,
31480         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31481         throws com.google.protobuf.InvalidProtocolBufferException {
31482       initFields();
31483       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
31484           com.google.protobuf.UnknownFieldSet.newBuilder();
31485       try {
31486         boolean done = false;
31487         while (!done) {
31488           int tag = input.readTag();
31489           switch (tag) {
31490             case 0:
31491               done = true;
31492               break;
31493             default: {
31494               if (!parseUnknownField(input, unknownFields,
31495                                      extensionRegistry, tag)) {
31496                 done = true;
31497               }
31498               break;
31499             }
31500           }
31501         }
31502       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
31503         throw e.setUnfinishedMessage(this);
31504       } catch (java.io.IOException e) {
31505         throw new com.google.protobuf.InvalidProtocolBufferException(
31506             e.getMessage()).setUnfinishedMessage(this);
31507       } finally {
31508         this.unknownFields = unknownFields.build();
31509         makeExtensionsImmutable();
31510       }
31511     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.Builder.class);
    }
31523 
    // NOTE(review): public mutable static field is a quirk of this protobuf
    // generator version; left as-is since this file is generated.
    public static com.google.protobuf.Parser<IsCatalogJanitorEnabledRequest> PARSER =
        new com.google.protobuf.AbstractParser<IsCatalogJanitorEnabledRequest>() {
      public IsCatalogJanitorEnabledRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsCatalogJanitorEnabledRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsCatalogJanitorEnabledRequest> getParserForType() {
      return PARSER;
    }
31538 
initFields()31539     private void initFields() {
31540     }
31541     private byte memoizedIsInitialized = -1;
isInitialized()31542     public final boolean isInitialized() {
31543       byte isInitialized = memoizedIsInitialized;
31544       if (isInitialized != -1) return isInitialized == 1;
31545 
31546       memoizedIsInitialized = 1;
31547       return true;
31548     }
31549 
writeTo(com.google.protobuf.CodedOutputStream output)31550     public void writeTo(com.google.protobuf.CodedOutputStream output)
31551                         throws java.io.IOException {
31552       getSerializedSize();
31553       getUnknownFields().writeTo(output);
31554     }
31555 
31556     private int memoizedSerializedSize = -1;
getSerializedSize()31557     public int getSerializedSize() {
31558       int size = memoizedSerializedSize;
31559       if (size != -1) return size;
31560 
31561       size = 0;
31562       size += getUnknownFields().getSerializedSize();
31563       memoizedSerializedSize = size;
31564       return size;
31565     }
31566 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
31573 
    /**
     * Two requests are equal when their unknown field sets are equal; this
     * message type has no declared fields to compare.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
31589 
    // Memoized hashCode(): 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /** Hash derived from the type descriptor and unknown fields, memoized. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
31602 
    // Static parse helpers; each delegates to PARSER for the given input kind.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
31655 
    /** Returns a new, empty builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized with this message's contents. */
    public Builder toBuilder() { return newBuilder(this); }
31662 
    /** Creates a builder attached to {@code parent} for change propagation. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code IsCatalogJanitorEnabledRequest}
     *
     * <p>Builder for the field-less request message; only unknown fields can
     * be carried through merge operations.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No sub-message fields exist, so nothing to force-initialize here.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
      }

      /** Builds the message, throwing if it is uninitialized (never here: no required fields). */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without the initialization check. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges another request; only unknown fields can carry over. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      /**
       * Parses from the stream and merges into this builder. On parse failure
       * the partially parsed message (if any) is still merged before rethrow.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledRequest)
    }
31776 
    // Eagerly creates the shared default (singleton) instance at class load.
    static {
      defaultInstance = new IsCatalogJanitorEnabledRequest(true);
      defaultInstance.initFields();
    }
31781 
31782     // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledRequest)
31783   }
31784 
  /** Read-only accessor contract shared by the response message and its builder. */
  public interface IsCatalogJanitorEnabledResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool value = 1;
    /**
     * <code>required bool value = 1;</code>
     */
    boolean hasValue();
    /**
     * <code>required bool value = 1;</code>
     */
    boolean getValue();
  }
31798   /**
31799    * Protobuf type {@code IsCatalogJanitorEnabledResponse}
31800    */
31801   public static final class IsCatalogJanitorEnabledResponse extends
31802       com.google.protobuf.GeneratedMessage
31803       implements IsCatalogJanitorEnabledResponseOrBuilder {
31804     // Use IsCatalogJanitorEnabledResponse.newBuilder() to construct.
IsCatalogJanitorEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)31805     private IsCatalogJanitorEnabledResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
31806       super(builder);
31807       this.unknownFields = builder.getUnknownFields();
31808     }
IsCatalogJanitorEnabledResponse(boolean noInit)31809     private IsCatalogJanitorEnabledResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
31810 
31811     private static final IsCatalogJanitorEnabledResponse defaultInstance;
getDefaultInstance()31812     public static IsCatalogJanitorEnabledResponse getDefaultInstance() {
31813       return defaultInstance;
31814     }
31815 
getDefaultInstanceForType()31816     public IsCatalogJanitorEnabledResponse getDefaultInstanceForType() {
31817       return defaultInstance;
31818     }
31819 
31820     private final com.google.protobuf.UnknownFieldSet unknownFields;
31821     @java.lang.Override
31822     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()31823         getUnknownFields() {
31824       return this.unknownFields;
31825     }
IsCatalogJanitorEnabledResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)31826     private IsCatalogJanitorEnabledResponse(
31827         com.google.protobuf.CodedInputStream input,
31828         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31829         throws com.google.protobuf.InvalidProtocolBufferException {
31830       initFields();
31831       int mutable_bitField0_ = 0;
31832       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
31833           com.google.protobuf.UnknownFieldSet.newBuilder();
31834       try {
31835         boolean done = false;
31836         while (!done) {
31837           int tag = input.readTag();
31838           switch (tag) {
31839             case 0:
31840               done = true;
31841               break;
31842             default: {
31843               if (!parseUnknownField(input, unknownFields,
31844                                      extensionRegistry, tag)) {
31845                 done = true;
31846               }
31847               break;
31848             }
31849             case 8: {
31850               bitField0_ |= 0x00000001;
31851               value_ = input.readBool();
31852               break;
31853             }
31854           }
31855         }
31856       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
31857         throw e.setUnfinishedMessage(this);
31858       } catch (java.io.IOException e) {
31859         throw new com.google.protobuf.InvalidProtocolBufferException(
31860             e.getMessage()).setUnfinishedMessage(this);
31861       } finally {
31862         this.unknownFields = unknownFields.build();
31863         makeExtensionsImmutable();
31864       }
31865     }
31866     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()31867         getDescriptor() {
31868       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor;
31869     }
31870 
31871     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()31872         internalGetFieldAccessorTable() {
31873       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable
31874           .ensureFieldAccessorsInitialized(
31875               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.Builder.class);
31876     }
31877 
31878     public static com.google.protobuf.Parser<IsCatalogJanitorEnabledResponse> PARSER =
31879         new com.google.protobuf.AbstractParser<IsCatalogJanitorEnabledResponse>() {
31880       public IsCatalogJanitorEnabledResponse parsePartialFrom(
31881           com.google.protobuf.CodedInputStream input,
31882           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31883           throws com.google.protobuf.InvalidProtocolBufferException {
31884         return new IsCatalogJanitorEnabledResponse(input, extensionRegistry);
31885       }
31886     };
31887 
31888     @java.lang.Override
getParserForType()31889     public com.google.protobuf.Parser<IsCatalogJanitorEnabledResponse> getParserForType() {
31890       return PARSER;
31891     }
31892 
31893     private int bitField0_;
31894     // required bool value = 1;
31895     public static final int VALUE_FIELD_NUMBER = 1;
31896     private boolean value_;
31897     /**
31898      * <code>required bool value = 1;</code>
31899      */
hasValue()31900     public boolean hasValue() {
31901       return ((bitField0_ & 0x00000001) == 0x00000001);
31902     }
31903     /**
31904      * <code>required bool value = 1;</code>
31905      */
getValue()31906     public boolean getValue() {
31907       return value_;
31908     }
31909 
initFields()31910     private void initFields() {
31911       value_ = false;
31912     }
31913     private byte memoizedIsInitialized = -1;
isInitialized()31914     public final boolean isInitialized() {
31915       byte isInitialized = memoizedIsInitialized;
31916       if (isInitialized != -1) return isInitialized == 1;
31917 
31918       if (!hasValue()) {
31919         memoizedIsInitialized = 0;
31920         return false;
31921       }
31922       memoizedIsInitialized = 1;
31923       return true;
31924     }
31925 
writeTo(com.google.protobuf.CodedOutputStream output)31926     public void writeTo(com.google.protobuf.CodedOutputStream output)
31927                         throws java.io.IOException {
31928       getSerializedSize();
31929       if (((bitField0_ & 0x00000001) == 0x00000001)) {
31930         output.writeBool(1, value_);
31931       }
31932       getUnknownFields().writeTo(output);
31933     }
31934 
31935     private int memoizedSerializedSize = -1;
getSerializedSize()31936     public int getSerializedSize() {
31937       int size = memoizedSerializedSize;
31938       if (size != -1) return size;
31939 
31940       size = 0;
31941       if (((bitField0_ & 0x00000001) == 0x00000001)) {
31942         size += com.google.protobuf.CodedOutputStream
31943           .computeBoolSize(1, value_);
31944       }
31945       size += getUnknownFields().getSerializedSize();
31946       memoizedSerializedSize = size;
31947       return size;
31948     }
31949 
31950     private static final long serialVersionUID = 0L;
31951     @java.lang.Override
writeReplace()31952     protected java.lang.Object writeReplace()
31953         throws java.io.ObjectStreamException {
31954       return super.writeReplace();
31955     }
31956 
31957     @java.lang.Override
equals(final java.lang.Object obj)31958     public boolean equals(final java.lang.Object obj) {
31959       if (obj == this) {
31960        return true;
31961       }
31962       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse)) {
31963         return super.equals(obj);
31964       }
31965       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) obj;
31966 
31967       boolean result = true;
31968       result = result && (hasValue() == other.hasValue());
31969       if (hasValue()) {
31970         result = result && (getValue()
31971             == other.getValue());
31972       }
31973       result = result &&
31974           getUnknownFields().equals(other.getUnknownFields());
31975       return result;
31976     }
31977 
31978     private int memoizedHashCode = 0;
31979     @java.lang.Override
hashCode()31980     public int hashCode() {
31981       if (memoizedHashCode != 0) {
31982         return memoizedHashCode;
31983       }
31984       int hash = 41;
31985       hash = (19 * hash) + getDescriptorForType().hashCode();
31986       if (hasValue()) {
31987         hash = (37 * hash) + VALUE_FIELD_NUMBER;
31988         hash = (53 * hash) + hashBoolean(getValue());
31989       }
31990       hash = (29 * hash) + getUnknownFields().hashCode();
31991       memoizedHashCode = hash;
31992       return hash;
31993     }
31994 
parseFrom( com.google.protobuf.ByteString data)31995     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(
31996         com.google.protobuf.ByteString data)
31997         throws com.google.protobuf.InvalidProtocolBufferException {
31998       return PARSER.parseFrom(data);
31999     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)32000     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(
32001         com.google.protobuf.ByteString data,
32002         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32003         throws com.google.protobuf.InvalidProtocolBufferException {
32004       return PARSER.parseFrom(data, extensionRegistry);
32005     }
parseFrom(byte[] data)32006     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(byte[] data)
32007         throws com.google.protobuf.InvalidProtocolBufferException {
32008       return PARSER.parseFrom(data);
32009     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)32010     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(
32011         byte[] data,
32012         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32013         throws com.google.protobuf.InvalidProtocolBufferException {
32014       return PARSER.parseFrom(data, extensionRegistry);
32015     }
parseFrom(java.io.InputStream input)32016     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(java.io.InputStream input)
32017         throws java.io.IOException {
32018       return PARSER.parseFrom(input);
32019     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)32020     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(
32021         java.io.InputStream input,
32022         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32023         throws java.io.IOException {
32024       return PARSER.parseFrom(input, extensionRegistry);
32025     }
parseDelimitedFrom(java.io.InputStream input)32026     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(java.io.InputStream input)
32027         throws java.io.IOException {
32028       return PARSER.parseDelimitedFrom(input);
32029     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)32030     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseDelimitedFrom(
32031         java.io.InputStream input,
32032         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32033         throws java.io.IOException {
32034       return PARSER.parseDelimitedFrom(input, extensionRegistry);
32035     }
parseFrom( com.google.protobuf.CodedInputStream input)32036     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(
32037         com.google.protobuf.CodedInputStream input)
32038         throws java.io.IOException {
32039       return PARSER.parseFrom(input);
32040     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)32041     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parseFrom(
32042         com.google.protobuf.CodedInputStream input,
32043         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32044         throws java.io.IOException {
32045       return PARSER.parseFrom(input, extensionRegistry);
32046     }
32047 
newBuilder()32048     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()32049     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse prototype)32050     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse prototype) {
32051       return newBuilder().mergeFrom(prototype);
32052     }
toBuilder()32053     public Builder toBuilder() { return newBuilder(this); }
32054 
32055     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)32056     protected Builder newBuilderForType(
32057         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
32058       Builder builder = new Builder(parent);
32059       return builder;
32060     }
32061     /**
32062      * Protobuf type {@code IsCatalogJanitorEnabledResponse}
32063      */
32064     public static final class Builder extends
32065         com.google.protobuf.GeneratedMessage.Builder<Builder>
32066        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponseOrBuilder {
32067       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()32068           getDescriptor() {
32069         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor;
32070       }
32071 
32072       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()32073           internalGetFieldAccessorTable() {
32074         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable
32075             .ensureFieldAccessorsInitialized(
32076                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.Builder.class);
32077       }
32078 
32079       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.newBuilder()
Builder()32080       private Builder() {
32081         maybeForceBuilderInitialization();
32082       }
32083 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)32084       private Builder(
32085           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
32086         super(parent);
32087         maybeForceBuilderInitialization();
32088       }
maybeForceBuilderInitialization()32089       private void maybeForceBuilderInitialization() {
32090         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
32091         }
32092       }
create()32093       private static Builder create() {
32094         return new Builder();
32095       }
32096 
clear()32097       public Builder clear() {
32098         super.clear();
32099         value_ = false;
32100         bitField0_ = (bitField0_ & ~0x00000001);
32101         return this;
32102       }
32103 
clone()32104       public Builder clone() {
32105         return create().mergeFrom(buildPartial());
32106       }
32107 
32108       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()32109           getDescriptorForType() {
32110         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsCatalogJanitorEnabledResponse_descriptor;
32111       }
32112 
getDefaultInstanceForType()32113       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse getDefaultInstanceForType() {
32114         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
32115       }
32116 
build()32117       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse build() {
32118         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse result = buildPartial();
32119         if (!result.isInitialized()) {
32120           throw newUninitializedMessageException(result);
32121         }
32122         return result;
32123       }
32124 
buildPartial()32125       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse buildPartial() {
32126         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse(this);
32127         int from_bitField0_ = bitField0_;
32128         int to_bitField0_ = 0;
32129         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
32130           to_bitField0_ |= 0x00000001;
32131         }
32132         result.value_ = value_;
32133         result.bitField0_ = to_bitField0_;
32134         onBuilt();
32135         return result;
32136       }
32137 
mergeFrom(com.google.protobuf.Message other)32138       public Builder mergeFrom(com.google.protobuf.Message other) {
32139         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) {
32140           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse)other);
32141         } else {
32142           super.mergeFrom(other);
32143           return this;
32144         }
32145       }
32146 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse other)32147       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse other) {
32148         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()) return this;
32149         if (other.hasValue()) {
32150           setValue(other.getValue());
32151         }
32152         this.mergeUnknownFields(other.getUnknownFields());
32153         return this;
32154       }
32155 
isInitialized()32156       public final boolean isInitialized() {
32157         if (!hasValue()) {
32158 
32159           return false;
32160         }
32161         return true;
32162       }
32163 
      /**
       * Parses one message from {@code input} and merges it into this
       * builder. If parsing fails part-way, the partially parsed message
       * attached to the exception is still merged (in the {@code finally}
       * block) before the exception propagates, so no consumed bytes are
       * silently dropped.
       *
       * @throws java.io.IOException if the stream fails or the data is
       *         malformed
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask: bit 0 tracks whether `value` has been set.
      private int bitField0_;

      // required bool value = 1;
      private boolean value_ ;
      /**
       * <code>required bool value = 1;</code>
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bool value = 1;</code>
       */
      public boolean getValue() {
        return value_;
      }
      /**
       * <code>required bool value = 1;</code>
       * Sets the field, records presence in the bitmask, and notifies any
       * parent builder of the change.
       */
      public Builder setValue(boolean value) {
        bitField0_ |= 0x00000001;
        value_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bool value = 1;</code>
       * Clears the presence bit and restores the proto default (false).
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000001);
        value_ = false;
        onChanged();
        return this;
      }
32215 
32216       // @@protoc_insertion_point(builder_scope:IsCatalogJanitorEnabledResponse)
32217     }
32218 
    // Eagerly builds the singleton default instance and initializes its
    // fields to their proto-declared defaults.
    static {
      defaultInstance = new IsCatalogJanitorEnabledResponse(true);
      defaultInstance.initFields();
    }
32223 
32224     // @@protoc_insertion_point(class_scope:IsCatalogJanitorEnabledResponse)
32225   }
32226 
  /**
   * Read-only accessor view implemented by both {@code SnapshotRequest}
   * and its {@code Builder}.
   */
  public interface SnapshotRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .SnapshotDescription snapshot = 1;
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    boolean hasSnapshot();
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
  }
  /**
   * Protobuf type {@code SnapshotRequest}
   *
   * Immutable request message carrying the single required
   * {@code SnapshotDescription} for a master snapshot RPC.
   */
  public static final class SnapshotRequest extends
      com.google.protobuf.GeneratedMessage
      implements SnapshotRequestOrBuilder {
    // Use SnapshotRequest.newBuilder() to construct.
    private SnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance; fields are left
    // at their defaults and the unknown-field set is empty.
    private SnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final SnapshotRequest defaultInstance;
    /** Returns the shared immutable all-fields-unset instance. */
    public static SnapshotRequest getDefaultInstance() {
      return defaultInstance;
    }

    public SnapshotRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor. Reads tags until EOF (tag 0),
     * preserving unrecognized fields in {@code unknownFields}. Invoked
     * only via {@code PARSER}.
     */
    private SnapshotRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (snapshot), wire type 2. If the field already
              // appeared, merge the repeat into the existing value per
              // protobuf "last message wins by merge" semantics.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = snapshot_.toBuilder();
              }
              snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(snapshot_);
                snapshot_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze collected unknown fields, even on failure, so the
        // unfinished message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class);
    }

    // Stateless parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<SnapshotRequest> PARSER =
        new com.google.protobuf.AbstractParser<SnapshotRequest>() {
      public SnapshotRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SnapshotRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SnapshotRequest> getParserForType() {
      return PARSER;
    }

    // Presence bitmask: bit 0 tracks whether `snapshot` was set.
    private int bitField0_;
    // required .SnapshotDescription snapshot = 1;
    public static final int SNAPSHOT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
      return snapshot_;
    }
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
      return snapshot_;
    }

    // Resets fields to their proto-declared defaults.
    private void initFields() {
      snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
    }
    // Memoized initialization state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    /**
     * Initialized only when the required {@code snapshot} field is present
     * and itself fully initialized. Result is cached after first call.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasSnapshot()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getSnapshot().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    /** Serializes set fields, then unknown fields, to {@code output}. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, snapshot_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size: -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, snapshot_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    /**
     * Value equality: same field presence, equal field values, and equal
     * unknown fields.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest) obj;

      boolean result = true;
      result = result && (hasSnapshot() == other.hasSnapshot());
      if (hasSnapshot()) {
        result = result && getSnapshot()
            .equals(other.getSnapshot());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash code: 0 means not yet computed.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- Static parse entry points; all delegate to PARSER. ----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code SnapshotRequest}
     *
     * Mutable builder for {@code SnapshotRequest}. The nested
     * {@code snapshot} field is managed either directly ({@code snapshot_})
     * or, once requested, through a lazily created
     * {@code SingleFieldBuilder} ({@code snapshotBuilder_}); exactly one of
     * the two is authoritative at any time.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requires it
      // (i.e. when building via reflection/parent-notification).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSnapshotFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets all fields to their defaults and clears presence bits. */
      public Builder clear() {
        super.clear();
        if (snapshotBuilder_ == null) {
          snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
        } else {
          snapshotBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if the required {@code snapshot}
       * field is missing or incomplete.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without checking required fields, copying presence bits and
       * the snapshot value (from either the plain field or its builder).
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (snapshotBuilder_ == null) {
          result.snapshot_ = snapshot_;
        } else {
          result.snapshot_ = snapshotBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      /**
       * Type-dispatching merge; routes to the typed overload for
       * {@code SnapshotRequest}, else uses the reflective superclass merge.
       */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges set fields of {@code other}; merging the default instance
       * is a no-op. Unknown fields are carried over.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance()) return this;
        if (other.hasSnapshot()) {
          mergeSnapshot(other.getSnapshot());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /**
       * Initialized only when the required {@code snapshot} field is set
       * and itself initialized.
       */
      public final boolean isInitialized() {
        if (!hasSnapshot()) {

          return false;
        }
        if (!getSnapshot().isInitialized()) {

          return false;
        }
        return true;
      }

      /**
       * Parses one message from the stream into this builder; on failure
       * the partially parsed message is still merged before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask: bit 0 tracks whether `snapshot` was set.
      private int bitField0_;

      // required .SnapshotDescription snapshot = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       */
      public boolean hasSnapshot() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
        if (snapshotBuilder_ == null) {
          return snapshot_;
        } else {
          return snapshotBuilder_.getMessage();
        }
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       */
      public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          snapshot_ = value;
          onChanged();
        } else {
          snapshotBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       */
      public Builder setSnapshot(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotBuilder_ == null) {
          snapshot_ = builderForValue.build();
          onChanged();
        } else {
          snapshotBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       *
       * Merges {@code value} into the existing snapshot if one is already
       * set (and is not the default instance); otherwise replaces it.
       */
      public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
            snapshot_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
          } else {
            snapshot_ = value;
          }
          onChanged();
        } else {
          snapshotBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       */
      public Builder clearSnapshot() {
        if (snapshotBuilder_ == null) {
          snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
          onChanged();
        } else {
          snapshotBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       *
       * Returns a mutable builder for the nested field, marking the field
       * as present.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getSnapshotFieldBuilder().getBuilder();
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
        if (snapshotBuilder_ != null) {
          return snapshotBuilder_.getMessageOrBuilder();
        } else {
          return snapshot_;
        }
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       *
       * Lazily creates the SingleFieldBuilder; from then on it owns the
       * field value and {@code snapshot_} is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
          getSnapshotFieldBuilder() {
        if (snapshotBuilder_ == null) {
          snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
                  snapshot_,
                  getParentForChildren(),
                  isClean());
          snapshot_ = null;
        }
        return snapshotBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:SnapshotRequest)
    }

    // Eagerly builds the singleton default instance and initializes its
    // fields to their proto-declared defaults.
    static {
      defaultInstance = new SnapshotRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:SnapshotRequest)
  }
32787 
  /**
   * Read-only accessor view implemented by both {@code SnapshotResponse}
   * and its {@code Builder}.
   */
  public interface SnapshotResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int64 expected_timeout = 1;
    /**
     * <code>required int64 expected_timeout = 1;</code>
     */
    boolean hasExpectedTimeout();
    /**
     * <code>required int64 expected_timeout = 1;</code>
     */
    long getExpectedTimeout();
  }
32801   /**
32802    * Protobuf type {@code SnapshotResponse}
32803    */
32804   public static final class SnapshotResponse extends
32805       com.google.protobuf.GeneratedMessage
32806       implements SnapshotResponseOrBuilder {
32807     // Use SnapshotResponse.newBuilder() to construct.
    // Use SnapshotResponse.newBuilder() to construct.
    private SnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance; fields are left
    // at their defaults and the unknown-field set is empty.
    private SnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
32813 
    private static final SnapshotResponse defaultInstance;
    /** Returns the shared immutable all-fields-unset instance. */
    public static SnapshotResponse getDefaultInstance() {
      return defaultInstance;
    }

    public SnapshotResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
32822 
    // Fields not recognized by this message's schema, preserved verbatim
    // for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor. Reads tags until EOF (tag 0),
     * preserving unrecognized fields in {@code unknownFields}. Invoked
     * only via {@code PARSER}.
     */
    private SnapshotResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (expected_timeout), wire type 0 (varint).
              bitField0_ |= 0x00000001;
              expectedTimeout_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze collected unknown fields, even on failure, so the
        // unfinished message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
32869     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()32870         getDescriptor() {
32871       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_descriptor;
32872     }
32873 
32874     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()32875         internalGetFieldAccessorTable() {
32876       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_fieldAccessorTable
32877           .ensureFieldAccessorsInitialized(
32878               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class);
32879     }
32880 
32881     public static com.google.protobuf.Parser<SnapshotResponse> PARSER =
32882         new com.google.protobuf.AbstractParser<SnapshotResponse>() {
32883       public SnapshotResponse parsePartialFrom(
32884           com.google.protobuf.CodedInputStream input,
32885           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32886           throws com.google.protobuf.InvalidProtocolBufferException {
32887         return new SnapshotResponse(input, extensionRegistry);
32888       }
32889     };
32890 
32891     @java.lang.Override
getParserForType()32892     public com.google.protobuf.Parser<SnapshotResponse> getParserForType() {
32893       return PARSER;
32894     }
32895 
32896     private int bitField0_;
32897     // required int64 expected_timeout = 1;
32898     public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1;
32899     private long expectedTimeout_;
32900     /**
32901      * <code>required int64 expected_timeout = 1;</code>
32902      */
hasExpectedTimeout()32903     public boolean hasExpectedTimeout() {
32904       return ((bitField0_ & 0x00000001) == 0x00000001);
32905     }
32906     /**
32907      * <code>required int64 expected_timeout = 1;</code>
32908      */
getExpectedTimeout()32909     public long getExpectedTimeout() {
32910       return expectedTimeout_;
32911     }
32912 
initFields()32913     private void initFields() {
32914       expectedTimeout_ = 0L;
32915     }
32916     private byte memoizedIsInitialized = -1;
isInitialized()32917     public final boolean isInitialized() {
32918       byte isInitialized = memoizedIsInitialized;
32919       if (isInitialized != -1) return isInitialized == 1;
32920 
32921       if (!hasExpectedTimeout()) {
32922         memoizedIsInitialized = 0;
32923         return false;
32924       }
32925       memoizedIsInitialized = 1;
32926       return true;
32927     }
32928 
writeTo(com.google.protobuf.CodedOutputStream output)32929     public void writeTo(com.google.protobuf.CodedOutputStream output)
32930                         throws java.io.IOException {
32931       getSerializedSize();
32932       if (((bitField0_ & 0x00000001) == 0x00000001)) {
32933         output.writeInt64(1, expectedTimeout_);
32934       }
32935       getUnknownFields().writeTo(output);
32936     }
32937 
32938     private int memoizedSerializedSize = -1;
getSerializedSize()32939     public int getSerializedSize() {
32940       int size = memoizedSerializedSize;
32941       if (size != -1) return size;
32942 
32943       size = 0;
32944       if (((bitField0_ & 0x00000001) == 0x00000001)) {
32945         size += com.google.protobuf.CodedOutputStream
32946           .computeInt64Size(1, expectedTimeout_);
32947       }
32948       size += getUnknownFields().getSerializedSize();
32949       memoizedSerializedSize = size;
32950       return size;
32951     }
32952 
32953     private static final long serialVersionUID = 0L;
32954     @java.lang.Override
writeReplace()32955     protected java.lang.Object writeReplace()
32956         throws java.io.ObjectStreamException {
32957       return super.writeReplace();
32958     }
32959 
32960     @java.lang.Override
equals(final java.lang.Object obj)32961     public boolean equals(final java.lang.Object obj) {
32962       if (obj == this) {
32963        return true;
32964       }
32965       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse)) {
32966         return super.equals(obj);
32967       }
32968       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) obj;
32969 
32970       boolean result = true;
32971       result = result && (hasExpectedTimeout() == other.hasExpectedTimeout());
32972       if (hasExpectedTimeout()) {
32973         result = result && (getExpectedTimeout()
32974             == other.getExpectedTimeout());
32975       }
32976       result = result &&
32977           getUnknownFields().equals(other.getUnknownFields());
32978       return result;
32979     }
32980 
32981     private int memoizedHashCode = 0;
32982     @java.lang.Override
hashCode()32983     public int hashCode() {
32984       if (memoizedHashCode != 0) {
32985         return memoizedHashCode;
32986       }
32987       int hash = 41;
32988       hash = (19 * hash) + getDescriptorForType().hashCode();
32989       if (hasExpectedTimeout()) {
32990         hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER;
32991         hash = (53 * hash) + hashLong(getExpectedTimeout());
32992       }
32993       hash = (29 * hash) + getUnknownFields().hashCode();
32994       memoizedHashCode = hash;
32995       return hash;
32996     }
32997 
parseFrom( com.google.protobuf.ByteString data)32998     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(
32999         com.google.protobuf.ByteString data)
33000         throws com.google.protobuf.InvalidProtocolBufferException {
33001       return PARSER.parseFrom(data);
33002     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33003     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(
33004         com.google.protobuf.ByteString data,
33005         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33006         throws com.google.protobuf.InvalidProtocolBufferException {
33007       return PARSER.parseFrom(data, extensionRegistry);
33008     }
parseFrom(byte[] data)33009     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(byte[] data)
33010         throws com.google.protobuf.InvalidProtocolBufferException {
33011       return PARSER.parseFrom(data);
33012     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33013     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(
33014         byte[] data,
33015         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33016         throws com.google.protobuf.InvalidProtocolBufferException {
33017       return PARSER.parseFrom(data, extensionRegistry);
33018     }
parseFrom(java.io.InputStream input)33019     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(java.io.InputStream input)
33020         throws java.io.IOException {
33021       return PARSER.parseFrom(input);
33022     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33023     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(
33024         java.io.InputStream input,
33025         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33026         throws java.io.IOException {
33027       return PARSER.parseFrom(input, extensionRegistry);
33028     }
parseDelimitedFrom(java.io.InputStream input)33029     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom(java.io.InputStream input)
33030         throws java.io.IOException {
33031       return PARSER.parseDelimitedFrom(input);
33032     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33033     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseDelimitedFrom(
33034         java.io.InputStream input,
33035         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33036         throws java.io.IOException {
33037       return PARSER.parseDelimitedFrom(input, extensionRegistry);
33038     }
parseFrom( com.google.protobuf.CodedInputStream input)33039     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(
33040         com.google.protobuf.CodedInputStream input)
33041         throws java.io.IOException {
33042       return PARSER.parseFrom(input);
33043     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33044     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parseFrom(
33045         com.google.protobuf.CodedInputStream input,
33046         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33047         throws java.io.IOException {
33048       return PARSER.parseFrom(input, extensionRegistry);
33049     }
33050 
newBuilder()33051     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()33052     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse prototype)33053     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse prototype) {
33054       return newBuilder().mergeFrom(prototype);
33055     }
toBuilder()33056     public Builder toBuilder() { return newBuilder(this); }
33057 
33058     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)33059     protected Builder newBuilderForType(
33060         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
33061       Builder builder = new Builder(parent);
33062       return builder;
33063     }
33064     /**
33065      * Protobuf type {@code SnapshotResponse}
33066      */
33067     public static final class Builder extends
33068         com.google.protobuf.GeneratedMessage.Builder<Builder>
33069        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponseOrBuilder {
33070       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()33071           getDescriptor() {
33072         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_descriptor;
33073       }
33074 
33075       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()33076           internalGetFieldAccessorTable() {
33077         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_fieldAccessorTable
33078             .ensureFieldAccessorsInitialized(
33079                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.Builder.class);
33080       }
33081 
33082       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.newBuilder()
Builder()33083       private Builder() {
33084         maybeForceBuilderInitialization();
33085       }
33086 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)33087       private Builder(
33088           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
33089         super(parent);
33090         maybeForceBuilderInitialization();
33091       }
maybeForceBuilderInitialization()33092       private void maybeForceBuilderInitialization() {
33093         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
33094         }
33095       }
create()33096       private static Builder create() {
33097         return new Builder();
33098       }
33099 
clear()33100       public Builder clear() {
33101         super.clear();
33102         expectedTimeout_ = 0L;
33103         bitField0_ = (bitField0_ & ~0x00000001);
33104         return this;
33105       }
33106 
clone()33107       public Builder clone() {
33108         return create().mergeFrom(buildPartial());
33109       }
33110 
33111       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()33112           getDescriptorForType() {
33113         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SnapshotResponse_descriptor;
33114       }
33115 
getDefaultInstanceForType()33116       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse getDefaultInstanceForType() {
33117         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
33118       }
33119 
build()33120       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse build() {
33121         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse result = buildPartial();
33122         if (!result.isInitialized()) {
33123           throw newUninitializedMessageException(result);
33124         }
33125         return result;
33126       }
33127 
buildPartial()33128       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse buildPartial() {
33129         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse(this);
33130         int from_bitField0_ = bitField0_;
33131         int to_bitField0_ = 0;
33132         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
33133           to_bitField0_ |= 0x00000001;
33134         }
33135         result.expectedTimeout_ = expectedTimeout_;
33136         result.bitField0_ = to_bitField0_;
33137         onBuilt();
33138         return result;
33139       }
33140 
mergeFrom(com.google.protobuf.Message other)33141       public Builder mergeFrom(com.google.protobuf.Message other) {
33142         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) {
33143           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse)other);
33144         } else {
33145           super.mergeFrom(other);
33146           return this;
33147         }
33148       }
33149 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse other)33150       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse other) {
33151         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()) return this;
33152         if (other.hasExpectedTimeout()) {
33153           setExpectedTimeout(other.getExpectedTimeout());
33154         }
33155         this.mergeUnknownFields(other.getUnknownFields());
33156         return this;
33157       }
33158 
isInitialized()33159       public final boolean isInitialized() {
33160         if (!hasExpectedTimeout()) {
33161 
33162           return false;
33163         }
33164         return true;
33165       }
33166 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33167       public Builder mergeFrom(
33168           com.google.protobuf.CodedInputStream input,
33169           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33170           throws java.io.IOException {
33171         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse parsedMessage = null;
33172         try {
33173           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
33174         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
33175           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) e.getUnfinishedMessage();
33176           throw e;
33177         } finally {
33178           if (parsedMessage != null) {
33179             mergeFrom(parsedMessage);
33180           }
33181         }
33182         return this;
33183       }
33184       private int bitField0_;
33185 
33186       // required int64 expected_timeout = 1;
33187       private long expectedTimeout_ ;
33188       /**
33189        * <code>required int64 expected_timeout = 1;</code>
33190        */
hasExpectedTimeout()33191       public boolean hasExpectedTimeout() {
33192         return ((bitField0_ & 0x00000001) == 0x00000001);
33193       }
33194       /**
33195        * <code>required int64 expected_timeout = 1;</code>
33196        */
getExpectedTimeout()33197       public long getExpectedTimeout() {
33198         return expectedTimeout_;
33199       }
33200       /**
33201        * <code>required int64 expected_timeout = 1;</code>
33202        */
setExpectedTimeout(long value)33203       public Builder setExpectedTimeout(long value) {
33204         bitField0_ |= 0x00000001;
33205         expectedTimeout_ = value;
33206         onChanged();
33207         return this;
33208       }
33209       /**
33210        * <code>required int64 expected_timeout = 1;</code>
33211        */
clearExpectedTimeout()33212       public Builder clearExpectedTimeout() {
33213         bitField0_ = (bitField0_ & ~0x00000001);
33214         expectedTimeout_ = 0L;
33215         onChanged();
33216         return this;
33217       }
33218 
33219       // @@protoc_insertion_point(builder_scope:SnapshotResponse)
33220     }
33221 
33222     static {
33223       defaultInstance = new SnapshotResponse(true);
defaultInstance.initFields()33224       defaultInstance.initFields();
33225     }
33226 
33227     // @@protoc_insertion_point(class_scope:SnapshotResponse)
33228   }
33229 
  /**
   * Read-side interface for the {@code GetCompletedSnapshotsRequest} message.
   * The message declares no fields, so nothing is added beyond the base
   * MessageOrBuilder contract.
   */
  public interface GetCompletedSnapshotsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
33233   /**
33234    * Protobuf type {@code GetCompletedSnapshotsRequest}
33235    */
33236   public static final class GetCompletedSnapshotsRequest extends
33237       com.google.protobuf.GeneratedMessage
33238       implements GetCompletedSnapshotsRequestOrBuilder {
33239     // Use GetCompletedSnapshotsRequest.newBuilder() to construct.
GetCompletedSnapshotsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)33240     private GetCompletedSnapshotsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
33241       super(builder);
33242       this.unknownFields = builder.getUnknownFields();
33243     }
GetCompletedSnapshotsRequest(boolean noInit)33244     private GetCompletedSnapshotsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
33245 
    // Shared immutable default instance (assignment is outside this chunk;
    // presumably a static initializer like SnapshotResponse's — verify there).
    private static final GetCompletedSnapshotsRequest defaultInstance;
    public static GetCompletedSnapshotsRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetCompletedSnapshotsRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
33254 
    // Wire fields this generated class does not know about; preserved for
    // forward compatibility.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetCompletedSnapshotsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33261     private GetCompletedSnapshotsRequest(
33262         com.google.protobuf.CodedInputStream input,
33263         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33264         throws com.google.protobuf.InvalidProtocolBufferException {
33265       initFields();
33266       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
33267           com.google.protobuf.UnknownFieldSet.newBuilder();
33268       try {
33269         boolean done = false;
33270         while (!done) {
33271           int tag = input.readTag();
33272           switch (tag) {
33273             case 0:
33274               done = true;
33275               break;
33276             default: {
33277               if (!parseUnknownField(input, unknownFields,
33278                                      extensionRegistry, tag)) {
33279                 done = true;
33280               }
33281               break;
33282             }
33283           }
33284         }
33285       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
33286         throw e.setUnfinishedMessage(this);
33287       } catch (java.io.IOException e) {
33288         throw new com.google.protobuf.InvalidProtocolBufferException(
33289             e.getMessage()).setUnfinishedMessage(this);
33290       } finally {
33291         this.unknownFields = unknownFields.build();
33292         makeExtensionsImmutable();
33293       }
33294     }
    // Reflection plumbing: descriptor and field-accessor table registered at
    // the MasterProtos file level.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class);
    }
33306 
    // NOTE(review): PARSER is public and non-final as emitted by this protoc
    // version; do not hand-edit generated code to change that.
    public static com.google.protobuf.Parser<GetCompletedSnapshotsRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetCompletedSnapshotsRequest>() {
      public GetCompletedSnapshotsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetCompletedSnapshotsRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetCompletedSnapshotsRequest> getParserForType() {
      return PARSER;
    }
33321 
initFields()33322     private void initFields() {
33323     }
33324     private byte memoizedIsInitialized = -1;
isInitialized()33325     public final boolean isInitialized() {
33326       byte isInitialized = memoizedIsInitialized;
33327       if (isInitialized != -1) return isInitialized == 1;
33328 
33329       memoizedIsInitialized = 1;
33330       return true;
33331     }
33332 
writeTo(com.google.protobuf.CodedOutputStream output)33333     public void writeTo(com.google.protobuf.CodedOutputStream output)
33334                         throws java.io.IOException {
33335       getSerializedSize();
33336       getUnknownFields().writeTo(output);
33337     }
33338 
33339     private int memoizedSerializedSize = -1;
getSerializedSize()33340     public int getSerializedSize() {
33341       int size = memoizedSerializedSize;
33342       if (size != -1) return size;
33343 
33344       size = 0;
33345       size += getUnknownFields().getSerializedSize();
33346       memoizedSerializedSize = size;
33347       return size;
33348     }
33349 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to the GeneratedMessage implementation.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
33356 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) obj;

      // With no declared fields, equality reduces to unknown-field equality.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
33385 
parseFrom( com.google.protobuf.ByteString data)33386     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(
33387         com.google.protobuf.ByteString data)
33388         throws com.google.protobuf.InvalidProtocolBufferException {
33389       return PARSER.parseFrom(data);
33390     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33391     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(
33392         com.google.protobuf.ByteString data,
33393         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33394         throws com.google.protobuf.InvalidProtocolBufferException {
33395       return PARSER.parseFrom(data, extensionRegistry);
33396     }
parseFrom(byte[] data)33397     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(byte[] data)
33398         throws com.google.protobuf.InvalidProtocolBufferException {
33399       return PARSER.parseFrom(data);
33400     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33401     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(
33402         byte[] data,
33403         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33404         throws com.google.protobuf.InvalidProtocolBufferException {
33405       return PARSER.parseFrom(data, extensionRegistry);
33406     }
parseFrom(java.io.InputStream input)33407     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(java.io.InputStream input)
33408         throws java.io.IOException {
33409       return PARSER.parseFrom(input);
33410     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33411     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(
33412         java.io.InputStream input,
33413         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33414         throws java.io.IOException {
33415       return PARSER.parseFrom(input, extensionRegistry);
33416     }
parseDelimitedFrom(java.io.InputStream input)33417     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(java.io.InputStream input)
33418         throws java.io.IOException {
33419       return PARSER.parseDelimitedFrom(input);
33420     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33421     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseDelimitedFrom(
33422         java.io.InputStream input,
33423         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33424         throws java.io.IOException {
33425       return PARSER.parseDelimitedFrom(input, extensionRegistry);
33426     }
parseFrom( com.google.protobuf.CodedInputStream input)33427     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(
33428         com.google.protobuf.CodedInputStream input)
33429         throws java.io.IOException {
33430       return PARSER.parseFrom(input);
33431     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33432     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parseFrom(
33433         com.google.protobuf.CodedInputStream input,
33434         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33435         throws java.io.IOException {
33436       return PARSER.parseFrom(input, extensionRegistry);
33437     }
33438 
newBuilder()33439     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()33440     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest prototype)33441     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest prototype) {
33442       return newBuilder().mergeFrom(prototype);
33443     }
toBuilder()33444     public Builder toBuilder() { return newBuilder(this); }
33445 
33446     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)33447     protected Builder newBuilderForType(
33448         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
33449       Builder builder = new Builder(parent);
33450       return builder;
33451     }
33452     /**
33453      * Protobuf type {@code GetCompletedSnapshotsRequest}
33454      */
33455     public static final class Builder extends
33456         com.google.protobuf.GeneratedMessage.Builder<Builder>
33457        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequestOrBuilder {
33458       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()33459           getDescriptor() {
33460         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_descriptor;
33461       }
33462 
33463       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()33464           internalGetFieldAccessorTable() {
33465         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable
33466             .ensureFieldAccessorsInitialized(
33467                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.Builder.class);
33468       }
33469 
33470       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.newBuilder()
Builder()33471       private Builder() {
33472         maybeForceBuilderInitialization();
33473       }
33474 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)33475       private Builder(
33476           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
33477         super(parent);
33478         maybeForceBuilderInitialization();
33479       }
maybeForceBuilderInitialization()33480       private void maybeForceBuilderInitialization() {
33481         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
33482         }
33483       }
create()33484       private static Builder create() {
33485         return new Builder();
33486       }
33487 
clear()33488       public Builder clear() {
33489         super.clear();
33490         return this;
33491       }
33492 
clone()33493       public Builder clone() {
33494         return create().mergeFrom(buildPartial());
33495       }
33496 
33497       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()33498           getDescriptorForType() {
33499         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsRequest_descriptor;
33500       }
33501 
getDefaultInstanceForType()33502       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest getDefaultInstanceForType() {
33503         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
33504       }
33505 
build()33506       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest build() {
33507         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = buildPartial();
33508         if (!result.isInitialized()) {
33509           throw newUninitializedMessageException(result);
33510         }
33511         return result;
33512       }
33513 
buildPartial()33514       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest buildPartial() {
33515         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest(this);
33516         onBuilt();
33517         return result;
33518       }
33519 
mergeFrom(com.google.protobuf.Message other)33520       public Builder mergeFrom(com.google.protobuf.Message other) {
33521         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) {
33522           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)other);
33523         } else {
33524           super.mergeFrom(other);
33525           return this;
33526         }
33527       }
33528 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other)33529       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest other) {
33530         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance()) return this;
33531         this.mergeUnknownFields(other.getUnknownFields());
33532         return this;
33533       }
33534 
isInitialized()33535       public final boolean isInitialized() {
33536         return true;
33537       }
33538 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33539       public Builder mergeFrom(
33540           com.google.protobuf.CodedInputStream input,
33541           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33542           throws java.io.IOException {
33543         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest parsedMessage = null;
33544         try {
33545           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
33546         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
33547           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest) e.getUnfinishedMessage();
33548           throw e;
33549         } finally {
33550           if (parsedMessage != null) {
33551             mergeFrom(parsedMessage);
33552           }
33553         }
33554         return this;
33555       }
33556 
33557       // @@protoc_insertion_point(builder_scope:GetCompletedSnapshotsRequest)
33558     }
33559 
    static {
      // Eagerly create the singleton returned by getDefaultInstance();
      // 'true' selects the no-init constructor variant.
      defaultInstance = new GetCompletedSnapshotsRequest(true);
      defaultInstance.initFields();
    }
33564 
33565     // @@protoc_insertion_point(class_scope:GetCompletedSnapshotsRequest)
33566   }
33567 
  /**
   * Read-accessor contract shared by {@code GetCompletedSnapshotsResponse}
   * and its Builder, covering the repeated
   * {@code SnapshotDescription snapshots = 1} field.
   */
  public interface GetCompletedSnapshotsResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .SnapshotDescription snapshots = 1;
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription>
        getSnapshotsList();
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index);
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    int getSnapshotsCount();
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
        getSnapshotsOrBuilderList();
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
        int index);
  }
33596   /**
33597    * Protobuf type {@code GetCompletedSnapshotsResponse}
33598    */
33599   public static final class GetCompletedSnapshotsResponse extends
33600       com.google.protobuf.GeneratedMessage
33601       implements GetCompletedSnapshotsResponseOrBuilder {
    // Use GetCompletedSnapshotsResponse.newBuilder() to construct.
    private GetCompletedSnapshotsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit variant: used for the static defaultInstance only; installs an
    // empty UnknownFieldSet and performs no other initialization.
    private GetCompletedSnapshotsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
33608 
    // Singleton default instance, created in the class static initializer.
    private static final GetCompletedSnapshotsResponse defaultInstance;
    public static GetCompletedSnapshotsResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetCompletedSnapshotsResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
33617 
    // Fields present on the wire but not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Reads tags until EOF (tag 0),
    // accumulating repeated field 1 (snapshots) and routing unrecognized
    // tags into unknownFields. Note the generated switch lists 'default'
    // before 'case 10' — legal Java; case order does not affect dispatch.
    private GetCompletedSnapshotsResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Bit 0 tracks whether snapshots_ was switched to a mutable list.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1, wire type 2 (length-delimited message).
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription>();
                mutable_bitField0_ |= 0x00000001;
              }
              snapshots_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the list and unknown fields even when parsing failed, so the
        // partially built message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          snapshots_ = java.util.Collections.unmodifiableList(snapshots_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_descriptor;
    }

    // Maps descriptor fields to reflective accessors for this type.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.Builder.class);
    }
33681 
    // NOTE(review): PARSER is public, static and non-final — this is the
    // protobuf-2.5 generator's style, not a hand-written choice.
    public static com.google.protobuf.Parser<GetCompletedSnapshotsResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetCompletedSnapshotsResponse>() {
      public GetCompletedSnapshotsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the private parsing constructor above.
        return new GetCompletedSnapshotsResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetCompletedSnapshotsResponse> getParserForType() {
      return PARSER;
    }
33696 
    // repeated .SnapshotDescription snapshots = 1;
    public static final int SNAPSHOTS_FIELD_NUMBER = 1;
    // Immutable after construction: either Collections.emptyList() (set in
    // initFields) or the unmodifiable list sealed by the parsing constructor.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> snapshots_;
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList() {
      return snapshots_;
    }
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
        getSnapshotsOrBuilderList() {
      return snapshots_;
    }
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    public int getSnapshotsCount() {
      return snapshots_.size();
    }
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) {
      return snapshots_.get(index);
    }
    /**
     * <code>repeated .SnapshotDescription snapshots = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
        int index) {
      return snapshots_.get(index);
    }
33732 
    private void initFields() {
      snapshots_ = java.util.Collections.emptyList();
    }
    // Memoization sentinel: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // This message has no required fields of its own; it is initialized
      // iff every nested SnapshotDescription element is.
      for (int i = 0; i < getSnapshotsCount(); i++) {
        if (!getSnapshots(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
33750 
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Result is discarded: called to memoize sizes before serialization
      // (standard protoc pattern).
      getSerializedSize();
      for (int i = 0; i < snapshots_.size(); i++) {
        output.writeMessage(1, snapshots_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Memoization sentinel: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < snapshots_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, snapshots_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
33774 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
33781 
    // Value equality over the snapshots list and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) obj;

      boolean result = true;
      result = result && getSnapshotsList()
          .equals(other.getSnapshotsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoization sentinel: 0 = not yet computed (a real hash of 0 would be
    // recomputed each call, which is harmless).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getSnapshotsCount() > 0) {
        hash = (37 * hash) + SNAPSHOTS_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshotsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
33816 
    // ---- Static parse entry points: all delegate to PARSER. ----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
33869 
    // Standard protoc builder factories: a fresh builder, a builder seeded
    // from a prototype message, and toBuilder() seeded from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Internal GeneratedMessage hook: nested builder reporting to parent.
      Builder builder = new Builder(parent);
      return builder;
    }
33883     /**
33884      * Protobuf type {@code GetCompletedSnapshotsResponse}
33885      */
33886     public static final class Builder extends
33887         com.google.protobuf.GeneratedMessage.Builder<Builder>
33888        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_descriptor;
      }

      // Maps descriptor fields to reflective accessors for this type.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.Builder.class);
      }
33900 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder when the runtime is
      // configured to always use field builders (reflection-heavy mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSnapshotsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
33919 
      public Builder clear() {
        super.clear();
        // Reset the snapshots field whichever representation is active:
        // the plain list or the delegating RepeatedFieldBuilder.
        if (snapshotsBuilder_ == null) {
          snapshots_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          snapshotsBuilder_.clear();
        }
        return this;
      }

      // Clone by building a partial snapshot and merging it into a new builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
33934 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetCompletedSnapshotsResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
      }
33943 
      // Like buildPartial(), but rejects messages whose nested
      // SnapshotDescriptions are missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse(this);
        int from_bitField0_ = bitField0_;
        if (snapshotsBuilder_ == null) {
          // Hand the builder's list to the message: seal it and clear the
          // "mutable" bit so later builder edits copy-on-write a fresh list.
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            snapshots_ = java.util.Collections.unmodifiableList(snapshots_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.snapshots_ = snapshots_;
        } else {
          result.snapshots_ = snapshotsBuilder_.build();
        }
        onBuilt();
        return result;
      }
33967 
      // Dynamic-dispatch merge: routes to the typed overload when possible.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance()) return this;
        if (snapshotsBuilder_ == null) {
          if (!other.snapshots_.isEmpty()) {
            if (snapshots_.isEmpty()) {
              // Share the other message's (immutable) list until a mutation
              // forces a copy (see ensureSnapshotsIsMutable).
              snapshots_ = other.snapshots_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureSnapshotsIsMutable();
              snapshots_.addAll(other.snapshots_);
            }
            onChanged();
          }
        } else {
          if (!other.snapshots_.isEmpty()) {
            if (snapshotsBuilder_.isEmpty()) {
              // Builder is empty: drop it and adopt the shared-list path,
              // recreating the builder only if the runtime mandates one.
              snapshotsBuilder_.dispose();
              snapshotsBuilder_ = null;
              snapshots_ = other.snapshots_;
              bitField0_ = (bitField0_ & ~0x00000001);
              snapshotsBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getSnapshotsFieldBuilder() : null;
            } else {
              snapshotsBuilder_.addAllMessages(other.snapshots_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
34008 
      // Initialized iff every nested SnapshotDescription element is;
      // this message itself has no required fields.
      public final boolean isInitialized() {
        for (int i = 0; i < getSnapshotsCount(); i++) {
          if (!getSnapshots(i).isInitialized()) {

            return false;
          }
        }
        return true;
      }
34018 
      // Parses from a stream; on parse failure, any partially parsed message
      // recovered from the exception is still merged in before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 = snapshots_ currently points at a privately owned mutable list.
      private int bitField0_;

      // repeated .SnapshotDescription snapshots = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> snapshots_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replace a shared/immutable list with a private
      // ArrayList copy before the first mutation.
      private void ensureSnapshotsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          snapshots_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription>(snapshots_);
          bitField0_ |= 0x00000001;
         }
      }

      // Lazily created delegate; while null, the plain snapshots_ list is
      // authoritative, afterwards all access goes through this builder.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotsBuilder_;
34050 
34051       /**
34052        * <code>repeated .SnapshotDescription snapshots = 1;</code>
34053        */
getSnapshotsList()34054       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> getSnapshotsList() {
34055         if (snapshotsBuilder_ == null) {
34056           return java.util.Collections.unmodifiableList(snapshots_);
34057         } else {
34058           return snapshotsBuilder_.getMessageList();
34059         }
34060       }
34061       /**
34062        * <code>repeated .SnapshotDescription snapshots = 1;</code>
34063        */
getSnapshotsCount()34064       public int getSnapshotsCount() {
34065         if (snapshotsBuilder_ == null) {
34066           return snapshots_.size();
34067         } else {
34068           return snapshotsBuilder_.getCount();
34069         }
34070       }
34071       /**
34072        * <code>repeated .SnapshotDescription snapshots = 1;</code>
34073        */
getSnapshots(int index)34074       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshots(int index) {
34075         if (snapshotsBuilder_ == null) {
34076           return snapshots_.get(index);
34077         } else {
34078           return snapshotsBuilder_.getMessage(index);
34079         }
34080       }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Replaces the element at {@code index} with {@code value}
       * (rejects null). Routed through the plain list or the field builder,
       * whichever is active.
       */
      public Builder setSnapshots(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSnapshotsIsMutable();
          snapshots_.set(index, value);
          onChanged();
        } else {
          snapshotsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Replaces the element at {@code index} with the message built from
       * {@code builderForValue}.
       */
      public Builder setSnapshots(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotsBuilder_ == null) {
          ensureSnapshotsIsMutable();
          snapshots_.set(index, builderForValue.build());
          onChanged();
        } else {
          snapshotsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Appends {@code value} (rejects null).
       */
      public Builder addSnapshots(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSnapshotsIsMutable();
          snapshots_.add(value);
          onChanged();
        } else {
          snapshotsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Inserts {@code value} at {@code index}, shifting later elements
       * (rejects null).
       */
      public Builder addSnapshots(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSnapshotsIsMutable();
          snapshots_.add(index, value);
          onChanged();
        } else {
          snapshotsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Appends the message built from {@code builderForValue}.
       */
      public Builder addSnapshots(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotsBuilder_ == null) {
          ensureSnapshotsIsMutable();
          snapshots_.add(builderForValue.build());
          onChanged();
        } else {
          snapshotsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Inserts the message built from {@code builderForValue} at
       * {@code index}.
       */
      public Builder addSnapshots(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotsBuilder_ == null) {
          ensureSnapshotsIsMutable();
          snapshots_.add(index, builderForValue.build());
          onChanged();
        } else {
          snapshotsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Appends every element of {@code values}. In plain-list mode this
       * delegates to GeneratedMessage.Builder.addAll, which null-checks each
       * element before adding.
       */
      public Builder addAllSnapshots(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription> values) {
        if (snapshotsBuilder_ == null) {
          ensureSnapshotsIsMutable();
          super.addAll(values, snapshots_);
          onChanged();
        } else {
          snapshotsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Removes every element. In plain-list mode the field reverts to the
       * shared immutable empty list and the copy-on-write bit is cleared so a
       * later mutation re-copies.
       */
      public Builder clearSnapshots() {
        if (snapshotsBuilder_ == null) {
          snapshots_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          snapshotsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Removes the element at {@code index}, shifting later elements down.
       */
      public Builder removeSnapshots(int index) {
        if (snapshotsBuilder_ == null) {
          ensureSnapshotsIsMutable();
          snapshots_.remove(index);
          onChanged();
        } else {
          snapshotsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Mutable sub-builder for the element at {@code index}. Forces the
       * field into builder mode (materializes the RepeatedFieldBuilder).
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotsBuilder(
          int index) {
        return getSnapshotsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Read-only view of the element at {@code index}; does NOT force
       * builder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotsOrBuilder(
          int index) {
        if (snapshotsBuilder_ == null) {
          return snapshots_.get(index);  } else {
          return snapshotsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Read-only views of all elements; does NOT force builder mode.
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
           getSnapshotsOrBuilderList() {
        if (snapshotsBuilder_ != null) {
          return snapshotsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(snapshots_);
        }
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Appends a new element initialized to the default instance and
       * returns its sub-builder. Forces builder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder() {
        return getSnapshotsFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance());
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Inserts a new default-valued element at {@code index} and returns
       * its sub-builder. Forces builder mode.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder addSnapshotsBuilder(
          int index) {
        return getSnapshotsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance());
      }
      /**
       * <code>repeated .SnapshotDescription snapshots = 1;</code>
       *
       * <p>Sub-builders for every element. Forces builder mode.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder>
           getSnapshotsBuilderList() {
        return getSnapshotsFieldBuilder().getBuilderList();
      }
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
          // Lazily switches the field from plain-list mode to builder mode:
          // the current list (and its copy-on-write bit) is handed off to the
          // new RepeatedFieldBuilder, and snapshots_ is nulled so all further
          // access goes through the builder.
          getSnapshotsFieldBuilder() {
        if (snapshotsBuilder_ == null) {
          snapshotsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
                  snapshots_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          snapshots_ = null;
        }
        return snapshotsBuilder_;
      }
34277 
34278       // @@protoc_insertion_point(builder_scope:GetCompletedSnapshotsResponse)
34279     }
34280 
    // Create the shared default (empty) instance eagerly at class load; the
    // no-init constructor skips field setup, so initFields() runs explicitly.
    static {
      defaultInstance = new GetCompletedSnapshotsResponse(true);
      defaultInstance.initFields();
    }
34285 
34286     // @@protoc_insertion_point(class_scope:GetCompletedSnapshotsResponse)
34287   }
34288 
  /**
   * Read-only accessor contract shared by {@code DeleteSnapshotRequest} and
   * its {@code Builder}.
   */
  public interface DeleteSnapshotRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .SnapshotDescription snapshot = 1;
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     *
     * @return whether the required field has been explicitly set
     */
    boolean hasSnapshot();
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     *
     * @return the field value (default instance when unset)
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     *
     * @return message-or-builder view, avoiding an eager build
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
  }
34306   /**
34307    * Protobuf type {@code DeleteSnapshotRequest}
34308    */
34309   public static final class DeleteSnapshotRequest extends
34310       com.google.protobuf.GeneratedMessage
34311       implements DeleteSnapshotRequestOrBuilder {
    // Use DeleteSnapshotRequest.newBuilder() to construct.
    private DeleteSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor for the shared default instance; fields are
    // populated afterwards by the static initializer via initFields().
    private DeleteSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance, assigned in the static initializer.
    private static final DeleteSnapshotRequest defaultInstance;
    public static DeleteSnapshotRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DeleteSnapshotRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read from the wire that this schema does not know about; kept so
    // re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from
     * {@code input} until end-of-stream, keeping unrecognized fields in
     * {@code unknownFields}.
     */
    private DeleteSnapshotRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the stream / enclosing message.
              done = true;
              break;
            default: {
              // Unknown field: preserve it; stop if it cannot be skipped.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field 1 ('snapshot'), wire type 2 (length-delimited).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field seen before: per proto semantics, merge the new
                // occurrence into the existing value.
                subBuilder = snapshot_.toBuilder();
              }
              snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(snapshot_);
                snapshot_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze collected state even when parsing aborts.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the DeleteSnapshotRequest message type (reflection API).
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_descriptor;
    }

    // Maps descriptor fields to the generated accessors for reflective access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.Builder.class);
    }

    // Stateless parser delegating to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<DeleteSnapshotRequest> PARSER =
        new com.google.protobuf.AbstractParser<DeleteSnapshotRequest>() {
      public DeleteSnapshotRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteSnapshotRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DeleteSnapshotRequest> getParserForType() {
      return PARSER;
    }
34408 
    // Presence bits for optional/required fields; bit 0 = 'snapshot' set.
    private int bitField0_;
    // required .SnapshotDescription snapshot = 1;
    public static final int SNAPSHOT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     *
     * @return whether 'snapshot' was present on the wire / explicitly set
     */
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     *
     * @return the snapshot descriptor (default instance when unset)
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
      return snapshot_;
    }
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
      return snapshot_;
    }

    // Resets fields to proto defaults; called by constructors before parsing.
    private void initFields() {
      snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    /**
     * True only when the required 'snapshot' field is present and itself
     * fully initialized. The answer is cached since the message is immutable.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasSnapshot()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getSnapshot().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
34451 
    /**
     * Serializes set fields (then any unknown fields) to {@code output}.
     * getSerializedSize() is called first to populate size caches that
     * nested-message serialization relies on.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, snapshot_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed (message is immutable).
    private int memoizedSerializedSize = -1;
    /** Total encoded byte size of this message, memoized after first call. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, snapshot_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
34475 
    private static final long serialVersionUID = 0L;
    // Java serialization hook: delegate to GeneratedMessage, which swaps in a
    // proto-encoded serialized form instead of default field serialization.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
34482 
34483     @java.lang.Override
equals(final java.lang.Object obj)34484     public boolean equals(final java.lang.Object obj) {
34485       if (obj == this) {
34486        return true;
34487       }
34488       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)) {
34489         return super.equals(obj);
34490       }
34491       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest) obj;
34492 
34493       boolean result = true;
34494       result = result && (hasSnapshot() == other.hasSnapshot());
34495       if (hasSnapshot()) {
34496         result = result && getSnapshot()
34497             .equals(other.getSnapshot());
34498       }
34499       result = result &&
34500           getUnknownFields().equals(other.getUnknownFields());
34501       return result;
34502     }
34503 
    // Cached hash; 0 means "not yet computed" (safe: recomputation is cheap
    // and deterministic on this immutable message).
    private int memoizedHashCode = 0;
    /**
     * Hash derived from the descriptor, each set field (tagged by field
     * number), and the unknown fields; consistent with equals().
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
34520 
    // Convenience parse entry points; all delegate to PARSER. The byte-based
    // overloads throw InvalidProtocolBufferException on malformed input, the
    // stream-based ones surface IOException as well. The *delimited* variants
    // expect a varint length prefix before the message bytes.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
34573 
    /** New empty builder. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** New builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Builder initialized from this message. */
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder wired to a parent for nested-builder change
    // notifications.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
34587     /**
34588      * Protobuf type {@code DeleteSnapshotRequest}
34589      */
34590     public static final class Builder extends
34591         com.google.protobuf.GeneratedMessage.Builder<Builder>
34592        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequestOrBuilder {
      // Shared message descriptor (same as the outer message's).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_descriptor;
      }

      // Reflection support: maps descriptor fields to generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Variant used by newBuilderForType(parent) so field changes propagate
      // up to the enclosing builder.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf test/debug switch).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSnapshotFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
34623 
      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        if (snapshotBuilder_ == null) {
          snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
        } else {
          snapshotBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      /** Deep copy: a fresh builder seeded from the current partial state. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
      }
34647 
      /**
       * Builds the message, throwing UninitializedMessageException if the
       * required 'snapshot' field is missing or incomplete.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the required-field check, copying the builder's
       * presence bits into the message's bitField0_ and taking 'snapshot'
       * from the nested builder when one is active.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (snapshotBuilder_ == null) {
          result.snapshot_ = snapshot_;
        } else {
          result.snapshot_ = snapshotBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
34672 
      // Generic merge: dispatches to the typed overload when possible,
      // otherwise falls back to reflective merging via the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: only fields set on 'other' overwrite/merge into this
      // builder; merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance()) return this;
        if (other.hasSnapshot()) {
          mergeSnapshot(other.getSnapshot());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
34690 
      /**
       * @return true only when the required 'snapshot' field is set and is
       *         itself fully initialized.
       */
      public final boolean isInitialized() {
        if (!hasSnapshot()) {

          return false;
        }
        if (!getSnapshot().isInitialized()) {

          return false;
        }
        return true;
      }
34702 
      /**
       * Parses a message from the stream and merges it into this builder.
       * On parse failure, any fields decoded before the error (the unfinished
       * message) are still merged in the finally block before rethrowing.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for this builder; bit 0 tracks the 'snapshot' field.
      private int bitField0_;

      // required .SnapshotDescription snapshot = 1;
      // Plain field value; authoritative only while snapshotBuilder_ is null.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
      // Lazily-created nested builder; once non-null it owns the field value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * @return whether the 'snapshot' field has been set (bit 0)
       */
      public boolean hasSnapshot() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * @return the current value, from the plain field or, if a nested
       *         builder exists, from that builder
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
        if (snapshotBuilder_ == null) {
          return snapshot_;
        } else {
          return snapshotBuilder_.getMessage();
        }
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * Replaces the field with {@code value} and marks it set.
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          snapshot_ = value;
          onChanged();
        } else {
          snapshotBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * Convenience overload: builds {@code builderForValue} and stores the
       * result, marking the field set.
       */
      public Builder setSnapshot(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotBuilder_ == null) {
          snapshot_ = builderForValue.build();
          onChanged();
        } else {
          snapshotBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * Merges {@code value} into the existing field if one is already set
       * (and is not the default instance); otherwise replaces it outright.
       */
      public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
            snapshot_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
          } else {
            snapshot_ = value;
          }
          onChanged();
        } else {
          snapshotBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * Resets the field to its default instance and clears its has-bit.
       */
      public Builder clearSnapshot() {
        if (snapshotBuilder_ == null) {
          snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
          onChanged();
        } else {
          snapshotBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * Returns a mutable sub-builder for in-place editing; marks the field
       * set because callers are expected to populate it.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getSnapshotFieldBuilder().getBuilder();
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * @return a read-only view of the field without forcing builder creation
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
        if (snapshotBuilder_ != null) {
          return snapshotBuilder_.getMessageOrBuilder();
        } else {
          return snapshot_;
        }
      }
      /**
       * <code>required .SnapshotDescription snapshot = 1;</code>
       * Lazily creates the SingleFieldBuilder on first use; after creation the
       * plain snapshot_ reference is nulled out so the builder owns the value.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
          getSnapshotFieldBuilder() {
        if (snapshotBuilder_ == null) {
          snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
                  snapshot_,
                  getParentForChildren(),
                  isClean());
          snapshot_ = null;
        }
        return snapshotBuilder_;
      }
34838 
34839       // @@protoc_insertion_point(builder_scope:DeleteSnapshotRequest)
34840     }
34841 
    static {
      // Create the immutable singleton default instance once per class load.
      defaultInstance = new DeleteSnapshotRequest(true);
      defaultInstance.initFields();
    }
34846 
34847     // @@protoc_insertion_point(class_scope:DeleteSnapshotRequest)
34848   }
34849 
  /**
   * Message-or-builder view for {@code DeleteSnapshotResponse}; the message
   * declares no fields, so no accessors are required.
   */
  public interface DeleteSnapshotResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code DeleteSnapshotResponse}
   *
   * <p>Empty response message: it carries no declared fields, only unknown
   * fields preserved from the wire.  Generated by protoc; do not hand-edit.
   */
  public static final class DeleteSnapshotResponse extends
      com.google.protobuf.GeneratedMessage
      implements DeleteSnapshotResponseOrBuilder {
    // Use DeleteSnapshotResponse.newBuilder() to construct.
    private DeleteSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static block to create the singleton default instance.
    private DeleteSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final DeleteSnapshotResponse defaultInstance;
    public static DeleteSnapshotResponse getDefaultInstance() {
      return defaultInstance;
    }

    public DeleteSnapshotResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields received on the wire that are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: reads until end-of-stream (tag 0), routing
    // every field into unknownFields since this message declares none.
    private DeleteSnapshotResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever was parsed so far, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.Builder.class);
    }

    public static com.google.protobuf.Parser<DeleteSnapshotResponse> PARSER =
        new com.google.protobuf.AbstractParser<DeleteSnapshotResponse>() {
      public DeleteSnapshotResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DeleteSnapshotResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DeleteSnapshotResponse> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so the message is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) obj;

      // With no declared fields, equality rests solely on unknown fields.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DeleteSnapshotResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message fields, so nothing to eagerly initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_DeleteSnapshotResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DeleteSnapshotResponse)
    }

    static {
      defaultInstance = new DeleteSnapshotResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:DeleteSnapshotResponse)
  }
35187 
  /**
   * Message-or-builder view for {@code RestoreSnapshotRequest}.
   */
  public interface RestoreSnapshotRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .SnapshotDescription snapshot = 1;
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     * @return whether the required 'snapshot' field has been set
     */
    boolean hasSnapshot();
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     * @return the snapshot description (default instance if unset)
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     * @return a read-only view of the snapshot field
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
  }
35205   /**
35206    * Protobuf type {@code RestoreSnapshotRequest}
35207    */
35208   public static final class RestoreSnapshotRequest extends
35209       com.google.protobuf.GeneratedMessage
35210       implements RestoreSnapshotRequestOrBuilder {
    // Use RestoreSnapshotRequest.newBuilder() to construct.
    private RestoreSnapshotRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static block to create the singleton default instance.
    private RestoreSnapshotRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
35217 
    // Singleton default instance, assigned in the class's static initializer.
    private static final RestoreSnapshotRequest defaultInstance;
    public static RestoreSnapshotRequest getDefaultInstance() {
      return defaultInstance;
    }

    public RestoreSnapshotRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
35226 
    // Fields received on the wire that are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: reads tags until end-of-stream (tag 0),
    // decoding field 1 (snapshot) and preserving anything else as unknown
    // fields.  Note: the 'default' arm precedes 'case 10' — this is the order
    // protoc emits, and Java switch dispatch is unaffected by case order.
    private RestoreSnapshotRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1, wire type 2 (length-delimited): SnapshotDescription.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field repeated on the wire: merge into the previous value.
                subBuilder = snapshot_.toBuilder();
              }
              snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(snapshot_);
                snapshot_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever was parsed so far, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.Builder.class);
    }
35292 
    // Stream parser delegating to the wire-parsing constructor above.
    public static com.google.protobuf.Parser<RestoreSnapshotRequest> PARSER =
        new com.google.protobuf.AbstractParser<RestoreSnapshotRequest>() {
      public RestoreSnapshotRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RestoreSnapshotRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RestoreSnapshotRequest> getParserForType() {
      return PARSER;
    }
35307 
    // Has-bits for this message; bit 0 tracks the 'snapshot' field.
    private int bitField0_;
    // required .SnapshotDescription snapshot = 1;
    public static final int SNAPSHOT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     * @return whether the 'snapshot' field was set on the wire (bit 0)
     */
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     * @return the snapshot description (default instance if unset)
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
      return snapshot_;
    }
    /**
     * <code>required .SnapshotDescription snapshot = 1;</code>
     * @return a read-only view of the snapshot field
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
      return snapshot_;
    }
35330 
initFields()35331     private void initFields() {
35332       snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
35333     }
35334     private byte memoizedIsInitialized = -1;
isInitialized()35335     public final boolean isInitialized() {
35336       byte isInitialized = memoizedIsInitialized;
35337       if (isInitialized != -1) return isInitialized == 1;
35338 
35339       if (!hasSnapshot()) {
35340         memoizedIsInitialized = 0;
35341         return false;
35342       }
35343       if (!getSnapshot().isInitialized()) {
35344         memoizedIsInitialized = 0;
35345         return false;
35346       }
35347       memoizedIsInitialized = 1;
35348       return true;
35349     }
35350 
writeTo(com.google.protobuf.CodedOutputStream output)35351     public void writeTo(com.google.protobuf.CodedOutputStream output)
35352                         throws java.io.IOException {
35353       getSerializedSize();
35354       if (((bitField0_ & 0x00000001) == 0x00000001)) {
35355         output.writeMessage(1, snapshot_);
35356       }
35357       getUnknownFields().writeTo(output);
35358     }
35359 
35360     private int memoizedSerializedSize = -1;
getSerializedSize()35361     public int getSerializedSize() {
35362       int size = memoizedSerializedSize;
35363       if (size != -1) return size;
35364 
35365       size = 0;
35366       if (((bitField0_ & 0x00000001) == 0x00000001)) {
35367         size += com.google.protobuf.CodedOutputStream
35368           .computeMessageSize(1, snapshot_);
35369       }
35370       size += getUnknownFields().getSerializedSize();
35371       memoizedSerializedSize = size;
35372       return size;
35373     }
35374 
    private static final long serialVersionUID = 0L;
    /** Java-serialization hook; defers to the GeneratedMessage implementation. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
35381 
    /**
     * Structural equality: same presence and value of {@code snapshot}, plus equal
     * unknown-field sets.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest) obj;

      boolean result = true;
      result = result && (hasSnapshot() == other.hasSnapshot());
      if (hasSnapshot()) {
        result = result && getSnapshot()
            .equals(other.getSnapshot());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed.
    private int memoizedHashCode = 0;
    /** Hash consistent with {@link #equals}: mixes descriptor, set fields, unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
35419 
parseFrom( com.google.protobuf.ByteString data)35420     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(
35421         com.google.protobuf.ByteString data)
35422         throws com.google.protobuf.InvalidProtocolBufferException {
35423       return PARSER.parseFrom(data);
35424     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35425     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(
35426         com.google.protobuf.ByteString data,
35427         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35428         throws com.google.protobuf.InvalidProtocolBufferException {
35429       return PARSER.parseFrom(data, extensionRegistry);
35430     }
parseFrom(byte[] data)35431     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(byte[] data)
35432         throws com.google.protobuf.InvalidProtocolBufferException {
35433       return PARSER.parseFrom(data);
35434     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35435     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(
35436         byte[] data,
35437         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35438         throws com.google.protobuf.InvalidProtocolBufferException {
35439       return PARSER.parseFrom(data, extensionRegistry);
35440     }
parseFrom(java.io.InputStream input)35441     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(java.io.InputStream input)
35442         throws java.io.IOException {
35443       return PARSER.parseFrom(input);
35444     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35445     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(
35446         java.io.InputStream input,
35447         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35448         throws java.io.IOException {
35449       return PARSER.parseFrom(input, extensionRegistry);
35450     }
parseDelimitedFrom(java.io.InputStream input)35451     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseDelimitedFrom(java.io.InputStream input)
35452         throws java.io.IOException {
35453       return PARSER.parseDelimitedFrom(input);
35454     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35455     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseDelimitedFrom(
35456         java.io.InputStream input,
35457         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35458         throws java.io.IOException {
35459       return PARSER.parseDelimitedFrom(input, extensionRegistry);
35460     }
parseFrom( com.google.protobuf.CodedInputStream input)35461     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(
35462         com.google.protobuf.CodedInputStream input)
35463         throws java.io.IOException {
35464       return PARSER.parseFrom(input);
35465     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35466     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parseFrom(
35467         com.google.protobuf.CodedInputStream input,
35468         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35469         throws java.io.IOException {
35470       return PARSER.parseFrom(input, extensionRegistry);
35471     }
35472 
newBuilder()35473     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()35474     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest prototype)35475     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest prototype) {
35476       return newBuilder().mergeFrom(prototype);
35477     }
toBuilder()35478     public Builder toBuilder() { return newBuilder(this); }
35479 
35480     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)35481     protected Builder newBuilderForType(
35482         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
35483       Builder builder = new Builder(parent);
35484       return builder;
35485     }
35486     /**
35487      * Protobuf type {@code RestoreSnapshotRequest}
35488      */
35489     public static final class Builder extends
35490         com.google.protobuf.GeneratedMessage.Builder<Builder>
35491        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequestOrBuilder {
35492       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()35493           getDescriptor() {
35494         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_descriptor;
35495       }
35496 
35497       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()35498           internalGetFieldAccessorTable() {
35499         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_fieldAccessorTable
35500             .ensureFieldAccessorsInitialized(
35501                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.Builder.class);
35502       }
35503 
35504       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.newBuilder()
Builder()35505       private Builder() {
35506         maybeForceBuilderInitialization();
35507       }
35508 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)35509       private Builder(
35510           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
35511         super(parent);
35512         maybeForceBuilderInitialization();
35513       }
maybeForceBuilderInitialization()35514       private void maybeForceBuilderInitialization() {
35515         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
35516           getSnapshotFieldBuilder();
35517         }
35518       }
create()35519       private static Builder create() {
35520         return new Builder();
35521       }
35522 
clear()35523       public Builder clear() {
35524         super.clear();
35525         if (snapshotBuilder_ == null) {
35526           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
35527         } else {
35528           snapshotBuilder_.clear();
35529         }
35530         bitField0_ = (bitField0_ & ~0x00000001);
35531         return this;
35532       }
35533 
clone()35534       public Builder clone() {
35535         return create().mergeFrom(buildPartial());
35536       }
35537 
35538       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()35539           getDescriptorForType() {
35540         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotRequest_descriptor;
35541       }
35542 
getDefaultInstanceForType()35543       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest getDefaultInstanceForType() {
35544         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
35545       }
35546 
build()35547       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest build() {
35548         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest result = buildPartial();
35549         if (!result.isInitialized()) {
35550           throw newUninitializedMessageException(result);
35551         }
35552         return result;
35553       }
35554 
buildPartial()35555       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest buildPartial() {
35556         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest(this);
35557         int from_bitField0_ = bitField0_;
35558         int to_bitField0_ = 0;
35559         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
35560           to_bitField0_ |= 0x00000001;
35561         }
35562         if (snapshotBuilder_ == null) {
35563           result.snapshot_ = snapshot_;
35564         } else {
35565           result.snapshot_ = snapshotBuilder_.build();
35566         }
35567         result.bitField0_ = to_bitField0_;
35568         onBuilt();
35569         return result;
35570       }
35571 
mergeFrom(com.google.protobuf.Message other)35572       public Builder mergeFrom(com.google.protobuf.Message other) {
35573         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest) {
35574           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)other);
35575         } else {
35576           super.mergeFrom(other);
35577           return this;
35578         }
35579       }
35580 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest other)35581       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest other) {
35582         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance()) return this;
35583         if (other.hasSnapshot()) {
35584           mergeSnapshot(other.getSnapshot());
35585         }
35586         this.mergeUnknownFields(other.getUnknownFields());
35587         return this;
35588       }
35589 
isInitialized()35590       public final boolean isInitialized() {
35591         if (!hasSnapshot()) {
35592 
35593           return false;
35594         }
35595         if (!getSnapshot().isInitialized()) {
35596 
35597           return false;
35598         }
35599         return true;
35600       }
35601 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35602       public Builder mergeFrom(
35603           com.google.protobuf.CodedInputStream input,
35604           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35605           throws java.io.IOException {
35606         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest parsedMessage = null;
35607         try {
35608           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
35609         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
35610           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest) e.getUnfinishedMessage();
35611           throw e;
35612         } finally {
35613           if (parsedMessage != null) {
35614             mergeFrom(parsedMessage);
35615           }
35616         }
35617         return this;
35618       }
35619       private int bitField0_;
35620 
35621       // required .SnapshotDescription snapshot = 1;
35622       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
35623       private com.google.protobuf.SingleFieldBuilder<
35624           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
35625       /**
35626        * <code>required .SnapshotDescription snapshot = 1;</code>
35627        */
hasSnapshot()35628       public boolean hasSnapshot() {
35629         return ((bitField0_ & 0x00000001) == 0x00000001);
35630       }
35631       /**
35632        * <code>required .SnapshotDescription snapshot = 1;</code>
35633        */
getSnapshot()35634       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
35635         if (snapshotBuilder_ == null) {
35636           return snapshot_;
35637         } else {
35638           return snapshotBuilder_.getMessage();
35639         }
35640       }
35641       /**
35642        * <code>required .SnapshotDescription snapshot = 1;</code>
35643        */
setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value)35644       public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
35645         if (snapshotBuilder_ == null) {
35646           if (value == null) {
35647             throw new NullPointerException();
35648           }
35649           snapshot_ = value;
35650           onChanged();
35651         } else {
35652           snapshotBuilder_.setMessage(value);
35653         }
35654         bitField0_ |= 0x00000001;
35655         return this;
35656       }
35657       /**
35658        * <code>required .SnapshotDescription snapshot = 1;</code>
35659        */
setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue)35660       public Builder setSnapshot(
35661           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
35662         if (snapshotBuilder_ == null) {
35663           snapshot_ = builderForValue.build();
35664           onChanged();
35665         } else {
35666           snapshotBuilder_.setMessage(builderForValue.build());
35667         }
35668         bitField0_ |= 0x00000001;
35669         return this;
35670       }
35671       /**
35672        * <code>required .SnapshotDescription snapshot = 1;</code>
35673        */
mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value)35674       public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
35675         if (snapshotBuilder_ == null) {
35676           if (((bitField0_ & 0x00000001) == 0x00000001) &&
35677               snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
35678             snapshot_ =
35679               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
35680           } else {
35681             snapshot_ = value;
35682           }
35683           onChanged();
35684         } else {
35685           snapshotBuilder_.mergeFrom(value);
35686         }
35687         bitField0_ |= 0x00000001;
35688         return this;
35689       }
35690       /**
35691        * <code>required .SnapshotDescription snapshot = 1;</code>
35692        */
clearSnapshot()35693       public Builder clearSnapshot() {
35694         if (snapshotBuilder_ == null) {
35695           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
35696           onChanged();
35697         } else {
35698           snapshotBuilder_.clear();
35699         }
35700         bitField0_ = (bitField0_ & ~0x00000001);
35701         return this;
35702       }
35703       /**
35704        * <code>required .SnapshotDescription snapshot = 1;</code>
35705        */
getSnapshotBuilder()35706       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
35707         bitField0_ |= 0x00000001;
35708         onChanged();
35709         return getSnapshotFieldBuilder().getBuilder();
35710       }
35711       /**
35712        * <code>required .SnapshotDescription snapshot = 1;</code>
35713        */
getSnapshotOrBuilder()35714       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
35715         if (snapshotBuilder_ != null) {
35716           return snapshotBuilder_.getMessageOrBuilder();
35717         } else {
35718           return snapshot_;
35719         }
35720       }
35721       /**
35722        * <code>required .SnapshotDescription snapshot = 1;</code>
35723        */
35724       private com.google.protobuf.SingleFieldBuilder<
35725           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
getSnapshotFieldBuilder()35726           getSnapshotFieldBuilder() {
35727         if (snapshotBuilder_ == null) {
35728           snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
35729               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
35730                   snapshot_,
35731                   getParentForChildren(),
35732                   isClean());
35733           snapshot_ = null;
35734         }
35735         return snapshotBuilder_;
35736       }
35737 
35738       // @@protoc_insertion_point(builder_scope:RestoreSnapshotRequest)
35739     }
35740 
    // Eagerly creates the singleton default instance (noInit ctor, then default fields).
    static {
      defaultInstance = new RestoreSnapshotRequest(true);
      defaultInstance.initFields();
    }
35745 
35746     // @@protoc_insertion_point(class_scope:RestoreSnapshotRequest)
35747   }
35748 
  /**
   * Accessor interface for {@code RestoreSnapshotResponse}. The message declares no
   * fields, so only the inherited {@code MessageOrBuilder} contract applies.
   */
  public interface RestoreSnapshotResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
35752   /**
35753    * Protobuf type {@code RestoreSnapshotResponse}
35754    */
35755   public static final class RestoreSnapshotResponse extends
35756       com.google.protobuf.GeneratedMessage
35757       implements RestoreSnapshotResponseOrBuilder {
    // Use RestoreSnapshotResponse.newBuilder() to construct.
    private RestoreSnapshotResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only by the static initializer to create the default instance.
    private RestoreSnapshotResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
35764 
    // Shared immutable default (empty) instance, assigned in the static initializer.
    private static final RestoreSnapshotResponse defaultInstance;
    public static RestoreSnapshotResponse getDefaultInstance() {
      return defaultInstance;
    }

    public RestoreSnapshotResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
35773 
    // Fields seen on the wire that this (field-less) message type does not define.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
RestoreSnapshotResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35780     private RestoreSnapshotResponse(
35781         com.google.protobuf.CodedInputStream input,
35782         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35783         throws com.google.protobuf.InvalidProtocolBufferException {
35784       initFields();
35785       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
35786           com.google.protobuf.UnknownFieldSet.newBuilder();
35787       try {
35788         boolean done = false;
35789         while (!done) {
35790           int tag = input.readTag();
35791           switch (tag) {
35792             case 0:
35793               done = true;
35794               break;
35795             default: {
35796               if (!parseUnknownField(input, unknownFields,
35797                                      extensionRegistry, tag)) {
35798                 done = true;
35799               }
35800               break;
35801             }
35802           }
35803         }
35804       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
35805         throw e.setUnfinishedMessage(this);
35806       } catch (java.io.IOException e) {
35807         throw new com.google.protobuf.InvalidProtocolBufferException(
35808             e.getMessage()).setUnfinishedMessage(this);
35809       } finally {
35810         this.unknownFields = unknownFields.build();
35811         makeExtensionsImmutable();
35812       }
35813     }
    /** Returns the protobuf descriptor for the {@code RestoreSnapshotResponse} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_descriptor;
    }

    /** Binds the generated message/builder classes to the descriptor's field accessors. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.Builder.class);
    }
35825 
    /** Stream parser that delegates to the parsing constructor; used by all parseFrom overloads. */
    public static com.google.protobuf.Parser<RestoreSnapshotResponse> PARSER =
        new com.google.protobuf.AbstractParser<RestoreSnapshotResponse>() {
      public RestoreSnapshotResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RestoreSnapshotResponse(input, extensionRegistry);
      }
    };

    /** Returns the shared {@link #PARSER} for this message type. */
    @java.lang.Override
    public com.google.protobuf.Parser<RestoreSnapshotResponse> getParserForType() {
      return PARSER;
    }
35840 
initFields()35841     private void initFields() {
35842     }
35843     private byte memoizedIsInitialized = -1;
isInitialized()35844     public final boolean isInitialized() {
35845       byte isInitialized = memoizedIsInitialized;
35846       if (isInitialized != -1) return isInitialized == 1;
35847 
35848       memoizedIsInitialized = 1;
35849       return true;
35850     }
35851 
writeTo(com.google.protobuf.CodedOutputStream output)35852     public void writeTo(com.google.protobuf.CodedOutputStream output)
35853                         throws java.io.IOException {
35854       getSerializedSize();
35855       getUnknownFields().writeTo(output);
35856     }
35857 
35858     private int memoizedSerializedSize = -1;
getSerializedSize()35859     public int getSerializedSize() {
35860       int size = memoizedSerializedSize;
35861       if (size != -1) return size;
35862 
35863       size = 0;
35864       size += getUnknownFields().getSerializedSize();
35865       memoizedSerializedSize = size;
35866       return size;
35867     }
35868 
    private static final long serialVersionUID = 0L;
    /** Java-serialization hook; defers to the GeneratedMessage implementation. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
35875 
    /** Structural equality: type match plus equal unknown-field sets (no declared fields). */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means not yet computed.
    private int memoizedHashCode = 0;
    /** Hash consistent with {@link #equals}: mixes descriptor and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
35904 
parseFrom( com.google.protobuf.ByteString data)35905     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(
35906         com.google.protobuf.ByteString data)
35907         throws com.google.protobuf.InvalidProtocolBufferException {
35908       return PARSER.parseFrom(data);
35909     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35910     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(
35911         com.google.protobuf.ByteString data,
35912         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35913         throws com.google.protobuf.InvalidProtocolBufferException {
35914       return PARSER.parseFrom(data, extensionRegistry);
35915     }
parseFrom(byte[] data)35916     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(byte[] data)
35917         throws com.google.protobuf.InvalidProtocolBufferException {
35918       return PARSER.parseFrom(data);
35919     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35920     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(
35921         byte[] data,
35922         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35923         throws com.google.protobuf.InvalidProtocolBufferException {
35924       return PARSER.parseFrom(data, extensionRegistry);
35925     }
parseFrom(java.io.InputStream input)35926     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(java.io.InputStream input)
35927         throws java.io.IOException {
35928       return PARSER.parseFrom(input);
35929     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35930     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(
35931         java.io.InputStream input,
35932         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35933         throws java.io.IOException {
35934       return PARSER.parseFrom(input, extensionRegistry);
35935     }
parseDelimitedFrom(java.io.InputStream input)35936     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseDelimitedFrom(java.io.InputStream input)
35937         throws java.io.IOException {
35938       return PARSER.parseDelimitedFrom(input);
35939     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35940     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseDelimitedFrom(
35941         java.io.InputStream input,
35942         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35943         throws java.io.IOException {
35944       return PARSER.parseDelimitedFrom(input, extensionRegistry);
35945     }
parseFrom( com.google.protobuf.CodedInputStream input)35946     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(
35947         com.google.protobuf.CodedInputStream input)
35948         throws java.io.IOException {
35949       return PARSER.parseFrom(input);
35950     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)35951     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parseFrom(
35952         com.google.protobuf.CodedInputStream input,
35953         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
35954         throws java.io.IOException {
35955       return PARSER.parseFrom(input, extensionRegistry);
35956     }
35957 
    /** Returns a fresh, empty builder for RestoreSnapshotResponse. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized with this message's current contents. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parented builders propagate change notifications to the parent.
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code RestoreSnapshotResponse}
     *
     * Builder for a field-less message: there is no per-field state to
     * manage, so clear()/mergeFrom()/buildPartial() only deal with
     * unknown fields and builder bookkeeping.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message fields, so nothing to pre-build even when the runtime
        // forces eager field-builder creation.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        // Deep copy via round-tripping through a partially built message.
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_RestoreSnapshotResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        // Fast path for same-type merge; otherwise fall back to the
        // reflection-based merge in the superclass.
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance()) return this;
        // Only unknown fields can carry data for this message type.
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // No required fields, so a builder is always initialized.
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:RestoreSnapshotResponse)
    }
36078 
    static {
      // Eagerly create the shared default instance using the no-init
      // constructor, then reset its fields to their default values.
      defaultInstance = new RestoreSnapshotResponse(true);
      defaultInstance.initFields();
    }
36083 
36084     // @@protoc_insertion_point(class_scope:RestoreSnapshotResponse)
36085   }
36086 
  /**
   * Read-only accessor interface shared by {@code IsSnapshotDoneRequest}
   * and its Builder. The single field is optional; callers should check
   * {@link #hasSnapshot()} before relying on {@link #getSnapshot()}.
   */
  public interface IsSnapshotDoneRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .SnapshotDescription snapshot = 1;
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     */
    boolean hasSnapshot();
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
  }
36104   /**
36105    * Protobuf type {@code IsSnapshotDoneRequest}
36106    *
36107    * <pre>
36108    * if you don't send the snapshot, then you will get it back
36109    * in the response (if the snapshot is done) so you can check the snapshot
36110    * </pre>
36111    */
36112   public static final class IsSnapshotDoneRequest extends
36113       com.google.protobuf.GeneratedMessage
36114       implements IsSnapshotDoneRequestOrBuilder {
36115     // Use IsSnapshotDoneRequest.newBuilder() to construct.
    // Builder-based constructor: adopts the builder's unknown fields.
    private IsSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the singleton default instance.
    private IsSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
36121 
    // Shared immutable default instance, created in the static initializer.
    private static final IsSnapshotDoneRequest defaultInstance;
    public static IsSnapshotDoneRequest getDefaultInstance() {
      return defaultInstance;
    }

    public IsSnapshotDoneRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
36130 
    // Fields that were present on the wire but not in this schema version;
    // preserved verbatim so round-tripping does not lose data.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor. Reads tag/value pairs until end of
     * input; unrecognized fields are captured into {@code unknownFields},
     * which is built in the finally block even if parsing aborts.
     */
    private IsSnapshotDoneRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Unused for this message; emitted by the generator for
      // repeated-field bookkeeping.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the stream.
              done = true;
              break;
            default: {
              // Preserve unknown fields; a false return means an
              // end-group tag was hit, which also terminates parsing.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (snapshot), wire type 2 (length-delimited).
              // If the field repeats on the wire, merge the new payload
              // into the previously parsed value, per proto2 semantics.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = snapshot_.toBuilder();
              }
              snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(snapshot_);
                snapshot_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor plumbing: ties this class to the message definition
    // generated from Master.proto and its reflection accessors.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.Builder.class);
    }
36196 
    // NOTE(review): public and non-final — a quirk of the protobuf 2.5
    // code generator; treat this field as read-only.
    public static com.google.protobuf.Parser<IsSnapshotDoneRequest> PARSER =
        new com.google.protobuf.AbstractParser<IsSnapshotDoneRequest>() {
      public IsSnapshotDoneRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsSnapshotDoneRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsSnapshotDoneRequest> getParserForType() {
      return PARSER;
    }
36211 
    // Bit 0 of bitField0_ records presence of the optional snapshot field.
    private int bitField0_;
    // optional .SnapshotDescription snapshot = 1;
    public static final int SNAPSHOT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     */
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     * Returns the default SnapshotDescription (set by initFields) when
     * the field is absent — never null.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
      return snapshot_;
    }
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
      return snapshot_;
    }
36234 
    // Resets fields to their proto defaults before parsing.
    private void initFields() {
      snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
    }
    // Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The field itself is optional, but if present its own required
      // sub-fields must be set.
      if (hasSnapshot()) {
        if (!getSnapshot().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
36252 
    // Serializes set fields then unknown fields. getSerializedSize() is
    // called first to populate memoized sizes required by writeMessage.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, snapshot_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, snapshot_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
36276 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
36283 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      // Non-IsSnapshotDoneRequest (including null) falls back to the
      // superclass comparison.
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) obj;

      // Equal iff presence bits, field values, and unknown fields match.
      boolean result = true;
      result = result && (hasSnapshot() == other.hasSnapshot());
      if (hasSnapshot()) {
        result = result && getSnapshot()
            .equals(other.getSnapshot());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
36304 
    // Memoized hash; 0 doubles as the "not yet computed" sentinel, so a
    // hash that genuinely equals 0 is simply recomputed each call.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
36321 
    // Static parse entry points. Every overload delegates to the PARSER
    // singleton. byte[]/ByteString overloads throw
    // InvalidProtocolBufferException on malformed input; the stream-based
    // overloads can additionally propagate plain java.io.IOException.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // "Delimited" variants expect a varint length prefix before the message
    // body, allowing several messages to be read back-to-back from a stream.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
36374 
    /** Returns a fresh, empty builder for IsSnapshotDoneRequest. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a new builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a builder initialized with this message's current contents. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parented builders propagate change notifications to the parent.
      Builder builder = new Builder(parent);
      return builder;
    }
36388     /**
36389      * Protobuf type {@code IsSnapshotDoneRequest}
36390      *
36391      * <pre>
36392      * if you don't send the snapshot, then you will get it back
36393      * in the response (if the snapshot is done) so you can check the snapshot
36394      * </pre>
36395      */
36396     public static final class Builder extends
36397         com.google.protobuf.GeneratedMessage.Builder<Builder>
36398        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequestOrBuilder {
      // Descriptor plumbing shared with the enclosing message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.Builder.class);
      }
36410 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Pre-create the nested field builder when the runtime requests
        // eager initialization (used for parented builder hierarchies).
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSnapshotFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
36429 
      public Builder clear() {
        super.clear();
        // Reset the snapshot field directly or via its builder, whichever
        // representation is currently active, and drop the presence bit.
        if (snapshotBuilder_ == null) {
          snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
        } else {
          snapshotBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        // Deep copy via round-tripping through a partially built message.
        return create().mergeFrom(buildPartial());
      }
36444 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
      }
36453 
      // build() enforces required-field initialization; buildPartial()
      // does not and may return an incompletely initialized message.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Copy the presence bit, then the value from whichever snapshot
        // representation (plain field or nested builder) is active.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (snapshotBuilder_ == null) {
          result.snapshot_ = snapshot_;
        } else {
          result.snapshot_ = snapshotBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
36478 
      public Builder mergeFrom(com.google.protobuf.Message other) {
        // Fast path for same-type merge; otherwise fall back to the
        // reflection-based merge in the superclass.
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance()) return this;
        if (other.hasSnapshot()) {
          mergeSnapshot(other.getSnapshot());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        // snapshot itself is optional, but if set it must be internally
        // initialized (its own required sub-fields present).
        if (hasSnapshot()) {
          if (!getSnapshot().isInitialized()) {

            return false;
          }
        }
        return true;
      }
36506 
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 records presence of the optional snapshot field.
      private int bitField0_;

      // optional .SnapshotDescription snapshot = 1;
      // The field has two representations: a plain message value, or —
      // once lazily created — a SingleFieldBuilder that takes over.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
      /**
       * <code>optional .SnapshotDescription snapshot = 1;</code>
       */
      public boolean hasSnapshot() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .SnapshotDescription snapshot = 1;</code>
       * Reads from the nested builder when one exists, otherwise from the
       * plain field.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
        if (snapshotBuilder_ == null) {
          return snapshot_;
        } else {
          return snapshotBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .SnapshotDescription snapshot = 1;</code>
       * Sets the field from a message value; rejects null.
       */
      public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          snapshot_ = value;
          onChanged();
        } else {
          snapshotBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .SnapshotDescription snapshot = 1;</code>
       * Sets the field from a builder, materializing it via build().
       */
      public Builder setSnapshot(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotBuilder_ == null) {
          snapshot_ = builderForValue.build();
          onChanged();
        } else {
          snapshotBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
36576       /**
36577        * <code>optional .SnapshotDescription snapshot = 1;</code>
36578        */
mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value)36579       public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
36580         if (snapshotBuilder_ == null) {
36581           if (((bitField0_ & 0x00000001) == 0x00000001) &&
36582               snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
36583             snapshot_ =
36584               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
36585           } else {
36586             snapshot_ = value;
36587           }
36588           onChanged();
36589         } else {
36590           snapshotBuilder_.mergeFrom(value);
36591         }
36592         bitField0_ |= 0x00000001;
36593         return this;
36594       }
36595       /**
36596        * <code>optional .SnapshotDescription snapshot = 1;</code>
36597        */
clearSnapshot()36598       public Builder clearSnapshot() {
36599         if (snapshotBuilder_ == null) {
36600           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
36601           onChanged();
36602         } else {
36603           snapshotBuilder_.clear();
36604         }
36605         bitField0_ = (bitField0_ & ~0x00000001);
36606         return this;
36607       }
36608       /**
36609        * <code>optional .SnapshotDescription snapshot = 1;</code>
36610        */
getSnapshotBuilder()36611       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
36612         bitField0_ |= 0x00000001;
36613         onChanged();
36614         return getSnapshotFieldBuilder().getBuilder();
36615       }
36616       /**
36617        * <code>optional .SnapshotDescription snapshot = 1;</code>
36618        */
getSnapshotOrBuilder()36619       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
36620         if (snapshotBuilder_ != null) {
36621           return snapshotBuilder_.getMessageOrBuilder();
36622         } else {
36623           return snapshot_;
36624         }
36625       }
36626       /**
36627        * <code>optional .SnapshotDescription snapshot = 1;</code>
36628        */
36629       private com.google.protobuf.SingleFieldBuilder<
36630           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
getSnapshotFieldBuilder()36631           getSnapshotFieldBuilder() {
36632         if (snapshotBuilder_ == null) {
36633           snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
36634               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
36635                   snapshot_,
36636                   getParentForChildren(),
36637                   isClean());
36638           snapshot_ = null;
36639         }
36640         return snapshotBuilder_;
36641       }
36642 
36643       // @@protoc_insertion_point(builder_scope:IsSnapshotDoneRequest)
36644     }
36645 
    // Eagerly build the shared immutable default instance returned by
    // getDefaultInstance(); the no-init constructor skips field setup, so
    // initFields() is called explicitly here.
    static {
      defaultInstance = new IsSnapshotDoneRequest(true);
      defaultInstance.initFields();
    }
36650 
36651     // @@protoc_insertion_point(class_scope:IsSnapshotDoneRequest)
36652   }
36653 
  /**
   * Read-only accessor contract shared by the {@code IsSnapshotDoneResponse}
   * message and its Builder, so callers can inspect either interchangeably.
   */
  public interface IsSnapshotDoneResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool done = 1 [default = false];
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    boolean hasDone();
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    boolean getDone();

    // optional .SnapshotDescription snapshot = 2;
    /**
     * <code>optional .SnapshotDescription snapshot = 2;</code>
     */
    boolean hasSnapshot();
    /**
     * <code>optional .SnapshotDescription snapshot = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
    /**
     * <code>optional .SnapshotDescription snapshot = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
  }
36681   /**
36682    * Protobuf type {@code IsSnapshotDoneResponse}
36683    */
36684   public static final class IsSnapshotDoneResponse extends
36685       com.google.protobuf.GeneratedMessage
36686       implements IsSnapshotDoneResponseOrBuilder {
    // Use IsSnapshotDoneResponse.newBuilder() to construct.
    private IsSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      // Capture the builder's unknown fields so they survive into the message.
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static initializer to create the default instance.
    private IsSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final IsSnapshotDoneResponse defaultInstance;
    public static IsSnapshotDoneResponse getDefaultInstance() {
      return defaultInstance;
    }

    public IsSnapshotDoneResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
IsSnapshotDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)36709     private IsSnapshotDoneResponse(
36710         com.google.protobuf.CodedInputStream input,
36711         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
36712         throws com.google.protobuf.InvalidProtocolBufferException {
36713       initFields();
36714       int mutable_bitField0_ = 0;
36715       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
36716           com.google.protobuf.UnknownFieldSet.newBuilder();
36717       try {
36718         boolean done = false;
36719         while (!done) {
36720           int tag = input.readTag();
36721           switch (tag) {
36722             case 0:
36723               done = true;
36724               break;
36725             default: {
36726               if (!parseUnknownField(input, unknownFields,
36727                                      extensionRegistry, tag)) {
36728                 done = true;
36729               }
36730               break;
36731             }
36732             case 8: {
36733               bitField0_ |= 0x00000001;
36734               done_ = input.readBool();
36735               break;
36736             }
36737             case 18: {
36738               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
36739               if (((bitField0_ & 0x00000002) == 0x00000002)) {
36740                 subBuilder = snapshot_.toBuilder();
36741               }
36742               snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
36743               if (subBuilder != null) {
36744                 subBuilder.mergeFrom(snapshot_);
36745                 snapshot_ = subBuilder.buildPartial();
36746               }
36747               bitField0_ |= 0x00000002;
36748               break;
36749             }
36750           }
36751         }
36752       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
36753         throw e.setUnfinishedMessage(this);
36754       } catch (java.io.IOException e) {
36755         throw new com.google.protobuf.InvalidProtocolBufferException(
36756             e.getMessage()).setUnfinishedMessage(this);
36757       } finally {
36758         this.unknownFields = unknownFields.build();
36759         makeExtensionsImmutable();
36760       }
36761     }
    // Descriptor and field-accessor-table lookups delegate to the file-level
    // statics initialized elsewhere in MasterProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.Builder.class);
    }

    // Parser singleton; delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<IsSnapshotDoneResponse> PARSER =
        new com.google.protobuf.AbstractParser<IsSnapshotDoneResponse>() {
      public IsSnapshotDoneResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsSnapshotDoneResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsSnapshotDoneResponse> getParserForType() {
      return PARSER;
    }
36788 
    // Presence bits: bit 0 = done, bit 1 = snapshot.
    private int bitField0_;
    // optional bool done = 1 [default = false];
    public static final int DONE_FIELD_NUMBER = 1;
    private boolean done_;
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    public boolean hasDone() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    public boolean getDone() {
      return done_;
    }

    // optional .SnapshotDescription snapshot = 2;
    public static final int SNAPSHOT_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
    /**
     * <code>optional .SnapshotDescription snapshot = 2;</code>
     */
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .SnapshotDescription snapshot = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
      return snapshot_;
    }
    /**
     * <code>optional .SnapshotDescription snapshot = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
      // The immutable message itself implements the OrBuilder view.
      return snapshot_;
    }
36827 
initFields()36828     private void initFields() {
36829       done_ = false;
36830       snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
36831     }
36832     private byte memoizedIsInitialized = -1;
isInitialized()36833     public final boolean isInitialized() {
36834       byte isInitialized = memoizedIsInitialized;
36835       if (isInitialized != -1) return isInitialized == 1;
36836 
36837       if (hasSnapshot()) {
36838         if (!getSnapshot().isInitialized()) {
36839           memoizedIsInitialized = 0;
36840           return false;
36841         }
36842       }
36843       memoizedIsInitialized = 1;
36844       return true;
36845     }
36846 
writeTo(com.google.protobuf.CodedOutputStream output)36847     public void writeTo(com.google.protobuf.CodedOutputStream output)
36848                         throws java.io.IOException {
36849       getSerializedSize();
36850       if (((bitField0_ & 0x00000001) == 0x00000001)) {
36851         output.writeBool(1, done_);
36852       }
36853       if (((bitField0_ & 0x00000002) == 0x00000002)) {
36854         output.writeMessage(2, snapshot_);
36855       }
36856       getUnknownFields().writeTo(output);
36857     }
36858 
36859     private int memoizedSerializedSize = -1;
getSerializedSize()36860     public int getSerializedSize() {
36861       int size = memoizedSerializedSize;
36862       if (size != -1) return size;
36863 
36864       size = 0;
36865       if (((bitField0_ & 0x00000001) == 0x00000001)) {
36866         size += com.google.protobuf.CodedOutputStream
36867           .computeBoolSize(1, done_);
36868       }
36869       if (((bitField0_ & 0x00000002) == 0x00000002)) {
36870         size += com.google.protobuf.CodedOutputStream
36871           .computeMessageSize(2, snapshot_);
36872       }
36873       size += getUnknownFields().getSerializedSize();
36874       memoizedSerializedSize = size;
36875       return size;
36876     }
36877 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
36884 
    // Value equality: compares field presence, field values, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) obj;

      boolean result = true;
      result = result && (hasDone() == other.hasDone());
      if (hasDone()) {
        result = result && (getDone()
            == other.getDone());
      }
      result = result && (hasSnapshot() == other.hasSnapshot());
      if (hasSnapshot()) {
        result = result && getSnapshot()
            .equals(other.getSnapshot());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
36910 
    // Memoized hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      // Only set fields contribute, mixed with their field numbers so equal
      // values in different fields hash differently.
      if (hasDone()) {
        hash = (37 * hash) + DONE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getDone());
      }
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
36931 
parseFrom( com.google.protobuf.ByteString data)36932     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(
36933         com.google.protobuf.ByteString data)
36934         throws com.google.protobuf.InvalidProtocolBufferException {
36935       return PARSER.parseFrom(data);
36936     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)36937     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(
36938         com.google.protobuf.ByteString data,
36939         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
36940         throws com.google.protobuf.InvalidProtocolBufferException {
36941       return PARSER.parseFrom(data, extensionRegistry);
36942     }
parseFrom(byte[] data)36943     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(byte[] data)
36944         throws com.google.protobuf.InvalidProtocolBufferException {
36945       return PARSER.parseFrom(data);
36946     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)36947     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(
36948         byte[] data,
36949         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
36950         throws com.google.protobuf.InvalidProtocolBufferException {
36951       return PARSER.parseFrom(data, extensionRegistry);
36952     }
parseFrom(java.io.InputStream input)36953     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(java.io.InputStream input)
36954         throws java.io.IOException {
36955       return PARSER.parseFrom(input);
36956     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)36957     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(
36958         java.io.InputStream input,
36959         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
36960         throws java.io.IOException {
36961       return PARSER.parseFrom(input, extensionRegistry);
36962     }
parseDelimitedFrom(java.io.InputStream input)36963     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input)
36964         throws java.io.IOException {
36965       return PARSER.parseDelimitedFrom(input);
36966     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)36967     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseDelimitedFrom(
36968         java.io.InputStream input,
36969         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
36970         throws java.io.IOException {
36971       return PARSER.parseDelimitedFrom(input, extensionRegistry);
36972     }
parseFrom( com.google.protobuf.CodedInputStream input)36973     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(
36974         com.google.protobuf.CodedInputStream input)
36975         throws java.io.IOException {
36976       return PARSER.parseFrom(input);
36977     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)36978     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parseFrom(
36979         com.google.protobuf.CodedInputStream input,
36980         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
36981         throws java.io.IOException {
36982       return PARSER.parseFrom(input, extensionRegistry);
36983     }
36984 
newBuilder()36985     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()36986     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse prototype)36987     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse prototype) {
36988       return newBuilder().mergeFrom(prototype);
36989     }
toBuilder()36990     public Builder toBuilder() { return newBuilder(this); }
36991 
36992     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)36993     protected Builder newBuilderForType(
36994         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
36995       Builder builder = new Builder(parent);
36996       return builder;
36997     }
36998     /**
36999      * Protobuf type {@code IsSnapshotDoneResponse}
37000      */
37001     public static final class Builder extends
37002         com.google.protobuf.GeneratedMessage.Builder<Builder>
37003        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-aware constructor used when this builder is nested in another
      // builder's field hierarchy.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates message-field sub-builders when the runtime requests
      // it (alwaysUseFieldBuilders); otherwise they are created lazily.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getSnapshotFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
37034 
clear()37035       public Builder clear() {
37036         super.clear();
37037         done_ = false;
37038         bitField0_ = (bitField0_ & ~0x00000001);
37039         if (snapshotBuilder_ == null) {
37040           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
37041         } else {
37042           snapshotBuilder_.clear();
37043         }
37044         bitField0_ = (bitField0_ & ~0x00000002);
37045         return this;
37046       }
37047 
clone()37048       public Builder clone() {
37049         return create().mergeFrom(buildPartial());
37050       }
37051 
37052       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()37053           getDescriptorForType() {
37054         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsSnapshotDoneResponse_descriptor;
37055       }
37056 
getDefaultInstanceForType()37057       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse getDefaultInstanceForType() {
37058         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
37059       }
37060 
build()37061       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse build() {
37062         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse result = buildPartial();
37063         if (!result.isInitialized()) {
37064           throw newUninitializedMessageException(result);
37065         }
37066         return result;
37067       }
37068 
buildPartial()37069       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse buildPartial() {
37070         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse(this);
37071         int from_bitField0_ = bitField0_;
37072         int to_bitField0_ = 0;
37073         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
37074           to_bitField0_ |= 0x00000001;
37075         }
37076         result.done_ = done_;
37077         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
37078           to_bitField0_ |= 0x00000002;
37079         }
37080         if (snapshotBuilder_ == null) {
37081           result.snapshot_ = snapshot_;
37082         } else {
37083           result.snapshot_ = snapshotBuilder_.build();
37084         }
37085         result.bitField0_ = to_bitField0_;
37086         onBuilt();
37087         return result;
37088       }
37089 
mergeFrom(com.google.protobuf.Message other)37090       public Builder mergeFrom(com.google.protobuf.Message other) {
37091         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) {
37092           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse)other);
37093         } else {
37094           super.mergeFrom(other);
37095           return this;
37096         }
37097       }
37098 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse other)37099       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse other) {
37100         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance()) return this;
37101         if (other.hasDone()) {
37102           setDone(other.getDone());
37103         }
37104         if (other.hasSnapshot()) {
37105           mergeSnapshot(other.getSnapshot());
37106         }
37107         this.mergeUnknownFields(other.getUnknownFields());
37108         return this;
37109       }
37110 
isInitialized()37111       public final boolean isInitialized() {
37112         if (hasSnapshot()) {
37113           if (!getSnapshot().isInitialized()) {
37114 
37115             return false;
37116           }
37117         }
37118         return true;
37119       }
37120 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)37121       public Builder mergeFrom(
37122           com.google.protobuf.CodedInputStream input,
37123           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
37124           throws java.io.IOException {
37125         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse parsedMessage = null;
37126         try {
37127           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
37128         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
37129           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) e.getUnfinishedMessage();
37130           throw e;
37131         } finally {
37132           if (parsedMessage != null) {
37133             mergeFrom(parsedMessage);
37134           }
37135         }
37136         return this;
37137       }
      // Presence bits for this builder: bit 0 = done, bit 1 = snapshot.
      private int bitField0_;

      // optional bool done = 1 [default = false];
      private boolean done_ ;
      /**
       * <code>optional bool done = 1 [default = false];</code>
       */
      public boolean hasDone() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bool done = 1 [default = false];</code>
       */
      public boolean getDone() {
        return done_;
      }
      /**
       * <code>optional bool done = 1 [default = false];</code>
       */
      public Builder setDone(boolean value) {
        bitField0_ |= 0x00000001;
        done_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool done = 1 [default = false];</code>
       */
      public Builder clearDone() {
        bitField0_ = (bitField0_ & ~0x00000001);
        done_ = false;
        onChanged();
        return this;
      }
37172 
37173       // optional .SnapshotDescription snapshot = 2;
37174       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
37175       private com.google.protobuf.SingleFieldBuilder<
37176           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
37177       /**
37178        * <code>optional .SnapshotDescription snapshot = 2;</code>
37179        */
hasSnapshot()37180       public boolean hasSnapshot() {
37181         return ((bitField0_ & 0x00000002) == 0x00000002);
37182       }
37183       /**
37184        * <code>optional .SnapshotDescription snapshot = 2;</code>
37185        */
getSnapshot()37186       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
37187         if (snapshotBuilder_ == null) {
37188           return snapshot_;
37189         } else {
37190           return snapshotBuilder_.getMessage();
37191         }
37192       }
37193       /**
37194        * <code>optional .SnapshotDescription snapshot = 2;</code>
37195        */
      // Replaces the snapshot field with the given message and marks it
      // present. Throws NullPointerException for a null value.
      public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          snapshot_ = value;
          onChanged();
        } else {
          snapshotBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
37209       /**
37210        * <code>optional .SnapshotDescription snapshot = 2;</code>
37211        */
      // Replaces the snapshot field from a builder; the builder is realized
      // immediately via build(), not kept live.
      public Builder setSnapshot(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
        if (snapshotBuilder_ == null) {
          snapshot_ = builderForValue.build();
          onChanged();
        } else {
          snapshotBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
37223       /**
37224        * <code>optional .SnapshotDescription snapshot = 2;</code>
37225        */
      // Merges `value` into the existing snapshot (protobuf field-merge
      // semantics). If the field is unset or still the default instance, the
      // value simply replaces it; otherwise a builder-based merge is performed.
      public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
        if (snapshotBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
            snapshot_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
          } else {
            snapshot_ = value;
          }
          onChanged();
        } else {
          snapshotBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
37242       /**
37243        * <code>optional .SnapshotDescription snapshot = 2;</code>
37244        */
      // Resets the snapshot field to its default instance and clears its
      // presence bit.
      public Builder clearSnapshot() {
        if (snapshotBuilder_ == null) {
          snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
          onChanged();
        } else {
          snapshotBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
37255       /**
37256        * <code>optional .SnapshotDescription snapshot = 2;</code>
37257        */
      // Returns a mutable builder for the snapshot field; forces the field
      // to be considered present and switches to sub-builder mode.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getSnapshotFieldBuilder().getBuilder();
      }
37263       /**
37264        * <code>optional .SnapshotDescription snapshot = 2;</code>
37265        */
      // Read-only view of the snapshot field: the live sub-builder when one
      // exists, otherwise the stored message.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
        if (snapshotBuilder_ != null) {
          return snapshotBuilder_.getMessageOrBuilder();
        } else {
          return snapshot_;
        }
      }
37273       /**
37274        * <code>optional .SnapshotDescription snapshot = 2;</code>
37275        */
      // Lazily creates the SingleFieldBuilder for the snapshot field; after
      // creation the builder owns the value and snapshot_ is nulled out.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
          getSnapshotFieldBuilder() {
        if (snapshotBuilder_ == null) {
          snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
                  snapshot_,
                  getParentForChildren(),
                  isClean());
          snapshot_ = null;
        }
        return snapshotBuilder_;
      }
37289 
37290       // @@protoc_insertion_point(builder_scope:IsSnapshotDoneResponse)
37291     }
37292 
    // Class initializer: creates the shared default instance with all fields
    // at their declared defaults.
    static {
      defaultInstance = new IsSnapshotDoneResponse(true);
      defaultInstance.initFields();
    }
37297 
37298     // @@protoc_insertion_point(class_scope:IsSnapshotDoneResponse)
37299   }
37300 
  /**
   * Read access to an {@code IsRestoreSnapshotDoneRequest} message or its
   * builder. Implemented by both the immutable message and its Builder.
   */
  public interface IsRestoreSnapshotDoneRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .SnapshotDescription snapshot = 1;
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     *
     * <p>Returns true when the snapshot field has been explicitly set.
     */
    boolean hasSnapshot();
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     *
     * <p>Returns the snapshot value, or the type's default instance when unset.
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot();
    /**
     * <code>optional .SnapshotDescription snapshot = 1;</code>
     *
     * <p>Returns a read-only view of the field (message, or live builder view).
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder();
  }
37318   /**
37319    * Protobuf type {@code IsRestoreSnapshotDoneRequest}
37320    */
37321   public static final class IsRestoreSnapshotDoneRequest extends
37322       com.google.protobuf.GeneratedMessage
37323       implements IsRestoreSnapshotDoneRequestOrBuilder {
37324     // Use IsRestoreSnapshotDoneRequest.newBuilder() to construct.
    // Builder-based constructor; copies the builder's unknown fields into the
    // new immutable message.
    private IsRestoreSnapshotDoneRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance; fields
    // are populated afterwards via initFields() in the static initializer.
    private IsRestoreSnapshotDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
37330 
    // Shared immutable default instance (all fields at declared defaults).
    private static final IsRestoreSnapshotDoneRequest defaultInstance;
    public static IsRestoreSnapshotDoneRequest getDefaultInstance() {
      return defaultInstance;
    }
37335 
    // Instance-level accessor for the shared default, required by Message.
    public IsRestoreSnapshotDoneRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
37339 
    // Fields present on the wire that this schema version does not know about;
    // preserved so they round-trip on re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until end of input (tag 0),
    // dispatching known fields and preserving unrecognized ones in
    // unknownFields. Invoked by PARSER.parsePartialFrom.
    private IsRestoreSnapshotDoneRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream / end of message.
              done = true;
              break;
            default: {
              // Unknown tag: stash it; parseUnknownField returns false on an
              // end-group tag, which also terminates the loop.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (snapshot), wire type 2 (length-delimited message).
              // If the field repeats on the wire, merge into the prior value.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = snapshot_.toBuilder();
              }
              snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(snapshot_);
                snapshot_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for this message type, taken from the outer MasterProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor;
    }
37398 
    // Binds the descriptor to the generated message/builder classes for
    // reflective field access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.Builder.class);
    }
37405 
    // Stateless parser delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<IsRestoreSnapshotDoneRequest> PARSER =
        new com.google.protobuf.AbstractParser<IsRestoreSnapshotDoneRequest>() {
      public IsRestoreSnapshotDoneRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsRestoreSnapshotDoneRequest(input, extensionRegistry);
      }
    };
37415 
    @java.lang.Override
    public com.google.protobuf.Parser<IsRestoreSnapshotDoneRequest> getParserForType() {
      return PARSER;
    }
37420 
    // Presence bits for optional fields (bit 0 == snapshot).
    private int bitField0_;
    // optional .SnapshotDescription snapshot = 1;
    public static final int SNAPSHOT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_;
37425     /**
37426      * <code>optional .SnapshotDescription snapshot = 1;</code>
37427      */
    // True when the snapshot field was set during parsing/building.
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
37431     /**
37432      * <code>optional .SnapshotDescription snapshot = 1;</code>
37433      */
    // Returns the snapshot value; initFields() guarantees this is the default
    // instance (never null) when the field is unset.
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
      return snapshot_;
    }
37437     /**
37438      * <code>optional .SnapshotDescription snapshot = 1;</code>
37439      */
    // On the immutable message the OrBuilder view is the message itself.
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
      return snapshot_;
    }
37443 
    // Seeds every field with its declared default before parsing.
    private void initFields() {
      snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // This message has no required fields of its own; it is initialized as
    // long as the optional snapshot, when present, is itself initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (hasSnapshot()) {
        if (!getSnapshot().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
37461 
    // Serializes set fields, then unknown fields, to the output stream.
    // getSerializedSize() is called first to populate memoized sizes that
    // writeMessage relies on.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, snapshot_);
      }
      getUnknownFields().writeTo(output);
    }
37470 
    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes (once) the serialized byte size of set fields plus unknowns.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, snapshot_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
37485 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
37492 
    // Value equality: same set fields, equal snapshot (when present), and
    // equal unknown fields. Non-message operands fall back to super.equals.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) obj;

      boolean result = true;
      result = result && (hasSnapshot() == other.hasSnapshot());
      if (hasSnapshot()) {
        result = result && getSnapshot()
            .equals(other.getSnapshot());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
37513 
    // Memoized hash (0 means "not yet computed"); consistent with equals():
    // mixes the descriptor, each present field, and the unknown fields.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
37530 
    // ------------------------------------------------------------------
    // Static parse entry points. All overloads delegate to PARSER; byte /
    // ByteString variants throw InvalidProtocolBufferException on malformed
    // input, stream variants additionally surface IOException.
    // ------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the payload.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
37583 
    // Creates a fresh Builder with all fields at their defaults.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a Builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
37590 
    // Builder factory used internally when this message is a nested field of
    // a parent builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
37597     /**
37598      * Protobuf type {@code IsRestoreSnapshotDoneRequest}
37599      */
37600     public static final class Builder extends
37601         com.google.protobuf.GeneratedMessage.Builder<Builder>
37602        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequestOrBuilder {
37603       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()37604           getDescriptor() {
37605         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor;
37606       }
37607 
37608       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()37609           internalGetFieldAccessorTable() {
37610         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable
37611             .ensureFieldAccessorsInitialized(
37612                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.Builder.class);
37613       }
37614 
37615       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.newBuilder()
Builder()37616       private Builder() {
37617         maybeForceBuilderInitialization();
37618       }
37619 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)37620       private Builder(
37621           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
37622         super(parent);
37623         maybeForceBuilderInitialization();
37624       }
maybeForceBuilderInitialization()37625       private void maybeForceBuilderInitialization() {
37626         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
37627           getSnapshotFieldBuilder();
37628         }
37629       }
create()37630       private static Builder create() {
37631         return new Builder();
37632       }
37633 
clear()37634       public Builder clear() {
37635         super.clear();
37636         if (snapshotBuilder_ == null) {
37637           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
37638         } else {
37639           snapshotBuilder_.clear();
37640         }
37641         bitField0_ = (bitField0_ & ~0x00000001);
37642         return this;
37643       }
37644 
clone()37645       public Builder clone() {
37646         return create().mergeFrom(buildPartial());
37647       }
37648 
37649       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()37650           getDescriptorForType() {
37651         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneRequest_descriptor;
37652       }
37653 
getDefaultInstanceForType()37654       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest getDefaultInstanceForType() {
37655         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
37656       }
37657 
build()37658       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest build() {
37659         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest result = buildPartial();
37660         if (!result.isInitialized()) {
37661           throw newUninitializedMessageException(result);
37662         }
37663         return result;
37664       }
37665 
buildPartial()37666       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest buildPartial() {
37667         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest(this);
37668         int from_bitField0_ = bitField0_;
37669         int to_bitField0_ = 0;
37670         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
37671           to_bitField0_ |= 0x00000001;
37672         }
37673         if (snapshotBuilder_ == null) {
37674           result.snapshot_ = snapshot_;
37675         } else {
37676           result.snapshot_ = snapshotBuilder_.build();
37677         }
37678         result.bitField0_ = to_bitField0_;
37679         onBuilt();
37680         return result;
37681       }
37682 
mergeFrom(com.google.protobuf.Message other)37683       public Builder mergeFrom(com.google.protobuf.Message other) {
37684         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) {
37685           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)other);
37686         } else {
37687           super.mergeFrom(other);
37688           return this;
37689         }
37690       }
37691 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest other)37692       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest other) {
37693         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance()) return this;
37694         if (other.hasSnapshot()) {
37695           mergeSnapshot(other.getSnapshot());
37696         }
37697         this.mergeUnknownFields(other.getUnknownFields());
37698         return this;
37699       }
37700 
isInitialized()37701       public final boolean isInitialized() {
37702         if (hasSnapshot()) {
37703           if (!getSnapshot().isInitialized()) {
37704 
37705             return false;
37706           }
37707         }
37708         return true;
37709       }
37710 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)37711       public Builder mergeFrom(
37712           com.google.protobuf.CodedInputStream input,
37713           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
37714           throws java.io.IOException {
37715         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest parsedMessage = null;
37716         try {
37717           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
37718         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
37719           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest) e.getUnfinishedMessage();
37720           throw e;
37721         } finally {
37722           if (parsedMessage != null) {
37723             mergeFrom(parsedMessage);
37724           }
37725         }
37726         return this;
37727       }
37728       private int bitField0_;
37729 
37730       // optional .SnapshotDescription snapshot = 1;
37731       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
37732       private com.google.protobuf.SingleFieldBuilder<
37733           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder> snapshotBuilder_;
37734       /**
37735        * <code>optional .SnapshotDescription snapshot = 1;</code>
37736        */
hasSnapshot()37737       public boolean hasSnapshot() {
37738         return ((bitField0_ & 0x00000001) == 0x00000001);
37739       }
37740       /**
37741        * <code>optional .SnapshotDescription snapshot = 1;</code>
37742        */
getSnapshot()37743       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getSnapshot() {
37744         if (snapshotBuilder_ == null) {
37745           return snapshot_;
37746         } else {
37747           return snapshotBuilder_.getMessage();
37748         }
37749       }
37750       /**
37751        * <code>optional .SnapshotDescription snapshot = 1;</code>
37752        */
setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value)37753       public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
37754         if (snapshotBuilder_ == null) {
37755           if (value == null) {
37756             throw new NullPointerException();
37757           }
37758           snapshot_ = value;
37759           onChanged();
37760         } else {
37761           snapshotBuilder_.setMessage(value);
37762         }
37763         bitField0_ |= 0x00000001;
37764         return this;
37765       }
37766       /**
37767        * <code>optional .SnapshotDescription snapshot = 1;</code>
37768        */
setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue)37769       public Builder setSnapshot(
37770           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder builderForValue) {
37771         if (snapshotBuilder_ == null) {
37772           snapshot_ = builderForValue.build();
37773           onChanged();
37774         } else {
37775           snapshotBuilder_.setMessage(builderForValue.build());
37776         }
37777         bitField0_ |= 0x00000001;
37778         return this;
37779       }
37780       /**
37781        * <code>optional .SnapshotDescription snapshot = 1;</code>
37782        */
mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value)37783       public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription value) {
37784         if (snapshotBuilder_ == null) {
37785           if (((bitField0_ & 0x00000001) == 0x00000001) &&
37786               snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) {
37787             snapshot_ =
37788               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
37789           } else {
37790             snapshot_ = value;
37791           }
37792           onChanged();
37793         } else {
37794           snapshotBuilder_.mergeFrom(value);
37795         }
37796         bitField0_ |= 0x00000001;
37797         return this;
37798       }
37799       /**
37800        * <code>optional .SnapshotDescription snapshot = 1;</code>
37801        */
clearSnapshot()37802       public Builder clearSnapshot() {
37803         if (snapshotBuilder_ == null) {
37804           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance();
37805           onChanged();
37806         } else {
37807           snapshotBuilder_.clear();
37808         }
37809         bitField0_ = (bitField0_ & ~0x00000001);
37810         return this;
37811       }
37812       /**
37813        * <code>optional .SnapshotDescription snapshot = 1;</code>
37814        */
getSnapshotBuilder()37815       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder getSnapshotBuilder() {
37816         bitField0_ |= 0x00000001;
37817         onChanged();
37818         return getSnapshotFieldBuilder().getBuilder();
37819       }
37820       /**
37821        * <code>optional .SnapshotDescription snapshot = 1;</code>
37822        */
getSnapshotOrBuilder()37823       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder getSnapshotOrBuilder() {
37824         if (snapshotBuilder_ != null) {
37825           return snapshotBuilder_.getMessageOrBuilder();
37826         } else {
37827           return snapshot_;
37828         }
37829       }
37830       /**
37831        * <code>optional .SnapshotDescription snapshot = 1;</code>
37832        */
37833       private com.google.protobuf.SingleFieldBuilder<
37834           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>
getSnapshotFieldBuilder()37835           getSnapshotFieldBuilder() {
37836         if (snapshotBuilder_ == null) {
37837           snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
37838               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder>(
37839                   snapshot_,
37840                   getParentForChildren(),
37841                   isClean());
37842           snapshot_ = null;
37843         }
37844         return snapshotBuilder_;
37845       }
37846 
37847       // @@protoc_insertion_point(builder_scope:IsRestoreSnapshotDoneRequest)
37848     }
37849 
    static {
      // Eagerly construct and initialize the shared default instance used by
      // getDefaultInstance() and default-value comparisons.
      defaultInstance = new IsRestoreSnapshotDoneRequest(true);
      defaultInstance.initFields();
    }
37854 
37855     // @@protoc_insertion_point(class_scope:IsRestoreSnapshotDoneRequest)
37856   }
37857 
  /**
   * Accessor interface for {@code IsRestoreSnapshotDoneResponse}; implemented
   * by both the immutable message and its Builder.
   */
  public interface IsRestoreSnapshotDoneResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool done = 1 [default = false];
    /**
     * <code>optional bool done = 1 [default = false];</code>
     *
     * @return true if the 'done' field was explicitly set
     */
    boolean hasDone();
    /**
     * <code>optional bool done = 1 [default = false];</code>
     *
     * @return the value of the 'done' flag (false when unset)
     */
    boolean getDone();
  }
37871   /**
37872    * Protobuf type {@code IsRestoreSnapshotDoneResponse}
37873    */
37874   public static final class IsRestoreSnapshotDoneResponse extends
37875       com.google.protobuf.GeneratedMessage
37876       implements IsRestoreSnapshotDoneResponseOrBuilder {
37877     // Use IsRestoreSnapshotDoneResponse.newBuilder() to construct.
IsRestoreSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)37878     private IsRestoreSnapshotDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
37879       super(builder);
37880       this.unknownFields = builder.getUnknownFields();
37881     }
IsRestoreSnapshotDoneResponse(boolean noInit)37882     private IsRestoreSnapshotDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
37883 
37884     private static final IsRestoreSnapshotDoneResponse defaultInstance;
getDefaultInstance()37885     public static IsRestoreSnapshotDoneResponse getDefaultInstance() {
37886       return defaultInstance;
37887     }
37888 
getDefaultInstanceForType()37889     public IsRestoreSnapshotDoneResponse getDefaultInstanceForType() {
37890       return defaultInstance;
37891     }
37892 
37893     private final com.google.protobuf.UnknownFieldSet unknownFields;
37894     @java.lang.Override
37895     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()37896         getUnknownFields() {
37897       return this.unknownFields;
37898     }
IsRestoreSnapshotDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)37899     private IsRestoreSnapshotDoneResponse(
37900         com.google.protobuf.CodedInputStream input,
37901         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
37902         throws com.google.protobuf.InvalidProtocolBufferException {
37903       initFields();
37904       int mutable_bitField0_ = 0;
37905       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
37906           com.google.protobuf.UnknownFieldSet.newBuilder();
37907       try {
37908         boolean done = false;
37909         while (!done) {
37910           int tag = input.readTag();
37911           switch (tag) {
37912             case 0:
37913               done = true;
37914               break;
37915             default: {
37916               if (!parseUnknownField(input, unknownFields,
37917                                      extensionRegistry, tag)) {
37918                 done = true;
37919               }
37920               break;
37921             }
37922             case 8: {
37923               bitField0_ |= 0x00000001;
37924               done_ = input.readBool();
37925               break;
37926             }
37927           }
37928         }
37929       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
37930         throw e.setUnfinishedMessage(this);
37931       } catch (java.io.IOException e) {
37932         throw new com.google.protobuf.InvalidProtocolBufferException(
37933             e.getMessage()).setUnfinishedMessage(this);
37934       } finally {
37935         this.unknownFields = unknownFields.build();
37936         makeExtensionsImmutable();
37937       }
37938     }
37939     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()37940         getDescriptor() {
37941       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor;
37942     }
37943 
37944     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()37945         internalGetFieldAccessorTable() {
37946       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable
37947           .ensureFieldAccessorsInitialized(
37948               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.Builder.class);
37949     }
37950 
37951     public static com.google.protobuf.Parser<IsRestoreSnapshotDoneResponse> PARSER =
37952         new com.google.protobuf.AbstractParser<IsRestoreSnapshotDoneResponse>() {
37953       public IsRestoreSnapshotDoneResponse parsePartialFrom(
37954           com.google.protobuf.CodedInputStream input,
37955           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
37956           throws com.google.protobuf.InvalidProtocolBufferException {
37957         return new IsRestoreSnapshotDoneResponse(input, extensionRegistry);
37958       }
37959     };
37960 
37961     @java.lang.Override
getParserForType()37962     public com.google.protobuf.Parser<IsRestoreSnapshotDoneResponse> getParserForType() {
37963       return PARSER;
37964     }
37965 
37966     private int bitField0_;
37967     // optional bool done = 1 [default = false];
37968     public static final int DONE_FIELD_NUMBER = 1;
37969     private boolean done_;
37970     /**
37971      * <code>optional bool done = 1 [default = false];</code>
37972      */
hasDone()37973     public boolean hasDone() {
37974       return ((bitField0_ & 0x00000001) == 0x00000001);
37975     }
37976     /**
37977      * <code>optional bool done = 1 [default = false];</code>
37978      */
getDone()37979     public boolean getDone() {
37980       return done_;
37981     }
37982 
initFields()37983     private void initFields() {
37984       done_ = false;
37985     }
37986     private byte memoizedIsInitialized = -1;
isInitialized()37987     public final boolean isInitialized() {
37988       byte isInitialized = memoizedIsInitialized;
37989       if (isInitialized != -1) return isInitialized == 1;
37990 
37991       memoizedIsInitialized = 1;
37992       return true;
37993     }
37994 
writeTo(com.google.protobuf.CodedOutputStream output)37995     public void writeTo(com.google.protobuf.CodedOutputStream output)
37996                         throws java.io.IOException {
37997       getSerializedSize();
37998       if (((bitField0_ & 0x00000001) == 0x00000001)) {
37999         output.writeBool(1, done_);
38000       }
38001       getUnknownFields().writeTo(output);
38002     }
38003 
38004     private int memoizedSerializedSize = -1;
getSerializedSize()38005     public int getSerializedSize() {
38006       int size = memoizedSerializedSize;
38007       if (size != -1) return size;
38008 
38009       size = 0;
38010       if (((bitField0_ & 0x00000001) == 0x00000001)) {
38011         size += com.google.protobuf.CodedOutputStream
38012           .computeBoolSize(1, done_);
38013       }
38014       size += getUnknownFields().getSerializedSize();
38015       memoizedSerializedSize = size;
38016       return size;
38017     }
38018 
38019     private static final long serialVersionUID = 0L;
38020     @java.lang.Override
writeReplace()38021     protected java.lang.Object writeReplace()
38022         throws java.io.ObjectStreamException {
38023       return super.writeReplace();
38024     }
38025 
38026     @java.lang.Override
equals(final java.lang.Object obj)38027     public boolean equals(final java.lang.Object obj) {
38028       if (obj == this) {
38029        return true;
38030       }
38031       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse)) {
38032         return super.equals(obj);
38033       }
38034       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) obj;
38035 
38036       boolean result = true;
38037       result = result && (hasDone() == other.hasDone());
38038       if (hasDone()) {
38039         result = result && (getDone()
38040             == other.getDone());
38041       }
38042       result = result &&
38043           getUnknownFields().equals(other.getUnknownFields());
38044       return result;
38045     }
38046 
38047     private int memoizedHashCode = 0;
38048     @java.lang.Override
hashCode()38049     public int hashCode() {
38050       if (memoizedHashCode != 0) {
38051         return memoizedHashCode;
38052       }
38053       int hash = 41;
38054       hash = (19 * hash) + getDescriptorForType().hashCode();
38055       if (hasDone()) {
38056         hash = (37 * hash) + DONE_FIELD_NUMBER;
38057         hash = (53 * hash) + hashBoolean(getDone());
38058       }
38059       hash = (29 * hash) + getUnknownFields().hashCode();
38060       memoizedHashCode = hash;
38061       return hash;
38062     }
38063 
parseFrom( com.google.protobuf.ByteString data)38064     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(
38065         com.google.protobuf.ByteString data)
38066         throws com.google.protobuf.InvalidProtocolBufferException {
38067       return PARSER.parseFrom(data);
38068     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38069     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(
38070         com.google.protobuf.ByteString data,
38071         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38072         throws com.google.protobuf.InvalidProtocolBufferException {
38073       return PARSER.parseFrom(data, extensionRegistry);
38074     }
parseFrom(byte[] data)38075     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(byte[] data)
38076         throws com.google.protobuf.InvalidProtocolBufferException {
38077       return PARSER.parseFrom(data);
38078     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38079     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(
38080         byte[] data,
38081         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38082         throws com.google.protobuf.InvalidProtocolBufferException {
38083       return PARSER.parseFrom(data, extensionRegistry);
38084     }
parseFrom(java.io.InputStream input)38085     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(java.io.InputStream input)
38086         throws java.io.IOException {
38087       return PARSER.parseFrom(input);
38088     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38089     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(
38090         java.io.InputStream input,
38091         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38092         throws java.io.IOException {
38093       return PARSER.parseFrom(input, extensionRegistry);
38094     }
parseDelimitedFrom(java.io.InputStream input)38095     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom(java.io.InputStream input)
38096         throws java.io.IOException {
38097       return PARSER.parseDelimitedFrom(input);
38098     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38099     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseDelimitedFrom(
38100         java.io.InputStream input,
38101         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38102         throws java.io.IOException {
38103       return PARSER.parseDelimitedFrom(input, extensionRegistry);
38104     }
parseFrom( com.google.protobuf.CodedInputStream input)38105     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(
38106         com.google.protobuf.CodedInputStream input)
38107         throws java.io.IOException {
38108       return PARSER.parseFrom(input);
38109     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38110     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parseFrom(
38111         com.google.protobuf.CodedInputStream input,
38112         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38113         throws java.io.IOException {
38114       return PARSER.parseFrom(input, extensionRegistry);
38115     }
38116 
newBuilder()38117     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()38118     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse prototype)38119     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse prototype) {
38120       return newBuilder().mergeFrom(prototype);
38121     }
toBuilder()38122     public Builder toBuilder() { return newBuilder(this); }
38123 
38124     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)38125     protected Builder newBuilderForType(
38126         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
38127       Builder builder = new Builder(parent);
38128       return builder;
38129     }
38130     /**
38131      * Protobuf type {@code IsRestoreSnapshotDoneResponse}
38132      */
38133     public static final class Builder extends
38134         com.google.protobuf.GeneratedMessage.Builder<Builder>
38135        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponseOrBuilder {
38136       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()38137           getDescriptor() {
38138         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor;
38139       }
38140 
38141       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()38142           internalGetFieldAccessorTable() {
38143         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable
38144             .ensureFieldAccessorsInitialized(
38145                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.Builder.class);
38146       }
38147 
38148       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.newBuilder()
Builder()38149       private Builder() {
38150         maybeForceBuilderInitialization();
38151       }
38152 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)38153       private Builder(
38154           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
38155         super(parent);
38156         maybeForceBuilderInitialization();
38157       }
maybeForceBuilderInitialization()38158       private void maybeForceBuilderInitialization() {
38159         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
38160         }
38161       }
create()38162       private static Builder create() {
38163         return new Builder();
38164       }
38165 
clear()38166       public Builder clear() {
38167         super.clear();
38168         done_ = false;
38169         bitField0_ = (bitField0_ & ~0x00000001);
38170         return this;
38171       }
38172 
clone()38173       public Builder clone() {
38174         return create().mergeFrom(buildPartial());
38175       }
38176 
38177       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()38178           getDescriptorForType() {
38179         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsRestoreSnapshotDoneResponse_descriptor;
38180       }
38181 
getDefaultInstanceForType()38182       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse getDefaultInstanceForType() {
38183         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
38184       }
38185 
build()38186       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse build() {
38187         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse result = buildPartial();
38188         if (!result.isInitialized()) {
38189           throw newUninitializedMessageException(result);
38190         }
38191         return result;
38192       }
38193 
buildPartial()38194       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse buildPartial() {
38195         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse(this);
38196         int from_bitField0_ = bitField0_;
38197         int to_bitField0_ = 0;
38198         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
38199           to_bitField0_ |= 0x00000001;
38200         }
38201         result.done_ = done_;
38202         result.bitField0_ = to_bitField0_;
38203         onBuilt();
38204         return result;
38205       }
38206 
mergeFrom(com.google.protobuf.Message other)38207       public Builder mergeFrom(com.google.protobuf.Message other) {
38208         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) {
38209           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse)other);
38210         } else {
38211           super.mergeFrom(other);
38212           return this;
38213         }
38214       }
38215 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse other)38216       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse other) {
38217         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance()) return this;
38218         if (other.hasDone()) {
38219           setDone(other.getDone());
38220         }
38221         this.mergeUnknownFields(other.getUnknownFields());
38222         return this;
38223       }
38224 
isInitialized()38225       public final boolean isInitialized() {
38226         return true;
38227       }
38228 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38229       public Builder mergeFrom(
38230           com.google.protobuf.CodedInputStream input,
38231           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38232           throws java.io.IOException {
38233         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse parsedMessage = null;
38234         try {
38235           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
38236         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
38237           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) e.getUnfinishedMessage();
38238           throw e;
38239         } finally {
38240           if (parsedMessage != null) {
38241             mergeFrom(parsedMessage);
38242           }
38243         }
38244         return this;
38245       }
38246       private int bitField0_;
38247 
38248       // optional bool done = 1 [default = false];
38249       private boolean done_ ;
38250       /**
38251        * <code>optional bool done = 1 [default = false];</code>
38252        */
hasDone()38253       public boolean hasDone() {
38254         return ((bitField0_ & 0x00000001) == 0x00000001);
38255       }
38256       /**
38257        * <code>optional bool done = 1 [default = false];</code>
38258        */
getDone()38259       public boolean getDone() {
38260         return done_;
38261       }
38262       /**
38263        * <code>optional bool done = 1 [default = false];</code>
38264        */
setDone(boolean value)38265       public Builder setDone(boolean value) {
38266         bitField0_ |= 0x00000001;
38267         done_ = value;
38268         onChanged();
38269         return this;
38270       }
38271       /**
38272        * <code>optional bool done = 1 [default = false];</code>
38273        */
clearDone()38274       public Builder clearDone() {
38275         bitField0_ = (bitField0_ & ~0x00000001);
38276         done_ = false;
38277         onChanged();
38278         return this;
38279       }
38280 
38281       // @@protoc_insertion_point(builder_scope:IsRestoreSnapshotDoneResponse)
38282     }
38283 
38284     static {
38285       defaultInstance = new IsRestoreSnapshotDoneResponse(true);
defaultInstance.initFields()38286       defaultInstance.initFields();
38287     }
38288 
38289     // @@protoc_insertion_point(class_scope:IsRestoreSnapshotDoneResponse)
38290   }
38291 
  /**
   * Accessor interface for {@code GetSchemaAlterStatusRequest}; implemented
   * by both the immutable message and its Builder.
   */
  public interface GetSchemaAlterStatusRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * @return true if the required table_name field has been set
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * @return the table whose schema-alter status is being queried
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
  }
38309   /**
38310    * Protobuf type {@code GetSchemaAlterStatusRequest}
38311    */
38312   public static final class GetSchemaAlterStatusRequest extends
38313       com.google.protobuf.GeneratedMessage
38314       implements GetSchemaAlterStatusRequestOrBuilder {
    // Use GetSchemaAlterStatusRequest.newBuilder() to construct.
    private GetSchemaAlterStatusRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor: used only to build the singleton default instance.
    private GetSchemaAlterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
38321 
    // Singleton default instance, created in the class's static initializer.
    private static final GetSchemaAlterStatusRequest defaultInstance;
    public static GetSchemaAlterStatusRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetSchemaAlterStatusRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
38330 
    // Fields read from the wire that are not part of this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetSchemaAlterStatusRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38337     private GetSchemaAlterStatusRequest(
38338         com.google.protobuf.CodedInputStream input,
38339         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38340         throws com.google.protobuf.InvalidProtocolBufferException {
38341       initFields();
38342       int mutable_bitField0_ = 0;
38343       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
38344           com.google.protobuf.UnknownFieldSet.newBuilder();
38345       try {
38346         boolean done = false;
38347         while (!done) {
38348           int tag = input.readTag();
38349           switch (tag) {
38350             case 0:
38351               done = true;
38352               break;
38353             default: {
38354               if (!parseUnknownField(input, unknownFields,
38355                                      extensionRegistry, tag)) {
38356                 done = true;
38357               }
38358               break;
38359             }
38360             case 10: {
38361               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
38362               if (((bitField0_ & 0x00000001) == 0x00000001)) {
38363                 subBuilder = tableName_.toBuilder();
38364               }
38365               tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
38366               if (subBuilder != null) {
38367                 subBuilder.mergeFrom(tableName_);
38368                 tableName_ = subBuilder.buildPartial();
38369               }
38370               bitField0_ |= 0x00000001;
38371               break;
38372             }
38373           }
38374         }
38375       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
38376         throw e.setUnfinishedMessage(this);
38377       } catch (java.io.IOException e) {
38378         throw new com.google.protobuf.InvalidProtocolBufferException(
38379             e.getMessage()).setUnfinishedMessage(this);
38380       } finally {
38381         this.unknownFields = unknownFields.build();
38382         makeExtensionsImmutable();
38383       }
38384     }
    // Descriptor and reflection accessor table for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.Builder.class);
    }
38396 
    // Parser delegating to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<GetSchemaAlterStatusRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetSchemaAlterStatusRequest>() {
      public GetSchemaAlterStatusRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetSchemaAlterStatusRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetSchemaAlterStatusRequest> getParserForType() {
      return PARSER;
    }
38411 
    // Bit 0 tracks presence of the required table_name field.
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }
38434 
initFields()38435     private void initFields() {
38436       tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
38437     }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // table_name is a required field: it must be present ...
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // ... and itself fully initialized.
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
38454 
writeTo(com.google.protobuf.CodedOutputStream output)38455     public void writeTo(com.google.protobuf.CodedOutputStream output)
38456                         throws java.io.IOException {
38457       getSerializedSize();
38458       if (((bitField0_ & 0x00000001) == 0x00000001)) {
38459         output.writeMessage(1, tableName_);
38460       }
38461       getUnknownFields().writeTo(output);
38462     }
38463 
    // Cached wire size; -1 means not yet computed. Safe to memoize because
    // the message is immutable once built.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
38478 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's
    // serialized-form proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
38485 
    // Field-by-field structural equality: presence bits, field values, and
    // unknown fields must all match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
38506 
    // Cached hash; 0 means not yet computed (a computed hash of exactly 0
    // would be recomputed each call — harmless, just not cached).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
38523 
parseFrom( com.google.protobuf.ByteString data)38524     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(
38525         com.google.protobuf.ByteString data)
38526         throws com.google.protobuf.InvalidProtocolBufferException {
38527       return PARSER.parseFrom(data);
38528     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38529     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(
38530         com.google.protobuf.ByteString data,
38531         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38532         throws com.google.protobuf.InvalidProtocolBufferException {
38533       return PARSER.parseFrom(data, extensionRegistry);
38534     }
parseFrom(byte[] data)38535     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(byte[] data)
38536         throws com.google.protobuf.InvalidProtocolBufferException {
38537       return PARSER.parseFrom(data);
38538     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38539     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(
38540         byte[] data,
38541         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38542         throws com.google.protobuf.InvalidProtocolBufferException {
38543       return PARSER.parseFrom(data, extensionRegistry);
38544     }
parseFrom(java.io.InputStream input)38545     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(java.io.InputStream input)
38546         throws java.io.IOException {
38547       return PARSER.parseFrom(input);
38548     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38549     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(
38550         java.io.InputStream input,
38551         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38552         throws java.io.IOException {
38553       return PARSER.parseFrom(input, extensionRegistry);
38554     }
parseDelimitedFrom(java.io.InputStream input)38555     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(java.io.InputStream input)
38556         throws java.io.IOException {
38557       return PARSER.parseDelimitedFrom(input);
38558     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38559     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseDelimitedFrom(
38560         java.io.InputStream input,
38561         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38562         throws java.io.IOException {
38563       return PARSER.parseDelimitedFrom(input, extensionRegistry);
38564     }
parseFrom( com.google.protobuf.CodedInputStream input)38565     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(
38566         com.google.protobuf.CodedInputStream input)
38567         throws java.io.IOException {
38568       return PARSER.parseFrom(input);
38569     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38570     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parseFrom(
38571         com.google.protobuf.CodedInputStream input,
38572         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38573         throws java.io.IOException {
38574       return PARSER.parseFrom(input, extensionRegistry);
38575     }
38576 
newBuilder()38577     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()38578     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest prototype)38579     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest prototype) {
38580       return newBuilder().mergeFrom(prototype);
38581     }
toBuilder()38582     public Builder toBuilder() { return newBuilder(this); }
38583 
    // Framework hook: creates a builder attached to a parent so nested
    // builders can propagate change notifications upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
38590     /**
38591      * Protobuf type {@code GetSchemaAlterStatusRequest}
38592      */
38593     public static final class Builder extends
38594         com.google.protobuf.GeneratedMessage.Builder<Builder>
38595        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequestOrBuilder {
38596       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()38597           getDescriptor() {
38598         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
38599       }
38600 
38601       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()38602           internalGetFieldAccessorTable() {
38603         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable
38604             .ensureFieldAccessorsInitialized(
38605                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.Builder.class);
38606       }
38607 
38608       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.newBuilder()
Builder()38609       private Builder() {
38610         maybeForceBuilderInitialization();
38611       }
38612 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)38613       private Builder(
38614           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
38615         super(parent);
38616         maybeForceBuilderInitialization();
38617       }
maybeForceBuilderInitialization()38618       private void maybeForceBuilderInitialization() {
38619         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
38620           getTableNameFieldBuilder();
38621         }
38622       }
create()38623       private static Builder create() {
38624         return new Builder();
38625       }
38626 
clear()38627       public Builder clear() {
38628         super.clear();
38629         if (tableNameBuilder_ == null) {
38630           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
38631         } else {
38632           tableNameBuilder_.clear();
38633         }
38634         bitField0_ = (bitField0_ & ~0x00000001);
38635         return this;
38636       }
38637 
clone()38638       public Builder clone() {
38639         return create().mergeFrom(buildPartial());
38640       }
38641 
38642       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()38643           getDescriptorForType() {
38644         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusRequest_descriptor;
38645       }
38646 
getDefaultInstanceForType()38647       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest getDefaultInstanceForType() {
38648         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance();
38649       }
38650 
build()38651       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest build() {
38652         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = buildPartial();
38653         if (!result.isInitialized()) {
38654           throw newUninitializedMessageException(result);
38655         }
38656         return result;
38657       }
38658 
buildPartial()38659       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest buildPartial() {
38660         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest(this);
38661         int from_bitField0_ = bitField0_;
38662         int to_bitField0_ = 0;
38663         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
38664           to_bitField0_ |= 0x00000001;
38665         }
38666         if (tableNameBuilder_ == null) {
38667           result.tableName_ = tableName_;
38668         } else {
38669           result.tableName_ = tableNameBuilder_.build();
38670         }
38671         result.bitField0_ = to_bitField0_;
38672         onBuilt();
38673         return result;
38674       }
38675 
mergeFrom(com.google.protobuf.Message other)38676       public Builder mergeFrom(com.google.protobuf.Message other) {
38677         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) {
38678           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)other);
38679         } else {
38680           super.mergeFrom(other);
38681           return this;
38682         }
38683       }
38684 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other)38685       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest other) {
38686         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance()) return this;
38687         if (other.hasTableName()) {
38688           mergeTableName(other.getTableName());
38689         }
38690         this.mergeUnknownFields(other.getUnknownFields());
38691         return this;
38692       }
38693 
isInitialized()38694       public final boolean isInitialized() {
38695         if (!hasTableName()) {
38696 
38697           return false;
38698         }
38699         if (!getTableName().isInitialized()) {
38700 
38701           return false;
38702         }
38703         return true;
38704       }
38705 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38706       public Builder mergeFrom(
38707           com.google.protobuf.CodedInputStream input,
38708           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38709           throws java.io.IOException {
38710         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest parsedMessage = null;
38711         try {
38712           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
38713         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
38714           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest) e.getUnfinishedMessage();
38715           throw e;
38716         } finally {
38717           if (parsedMessage != null) {
38718             mergeFrom(parsedMessage);
38719           }
38720         }
38721         return this;
38722       }
38723       private int bitField0_;
38724 
38725       // required .TableName table_name = 1;
38726       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
38727       private com.google.protobuf.SingleFieldBuilder<
38728           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
38729       /**
38730        * <code>required .TableName table_name = 1;</code>
38731        */
hasTableName()38732       public boolean hasTableName() {
38733         return ((bitField0_ & 0x00000001) == 0x00000001);
38734       }
38735       /**
38736        * <code>required .TableName table_name = 1;</code>
38737        */
getTableName()38738       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
38739         if (tableNameBuilder_ == null) {
38740           return tableName_;
38741         } else {
38742           return tableNameBuilder_.getMessage();
38743         }
38744       }
38745       /**
38746        * <code>required .TableName table_name = 1;</code>
38747        */
setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)38748       public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
38749         if (tableNameBuilder_ == null) {
38750           if (value == null) {
38751             throw new NullPointerException();
38752           }
38753           tableName_ = value;
38754           onChanged();
38755         } else {
38756           tableNameBuilder_.setMessage(value);
38757         }
38758         bitField0_ |= 0x00000001;
38759         return this;
38760       }
38761       /**
38762        * <code>required .TableName table_name = 1;</code>
38763        */
setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)38764       public Builder setTableName(
38765           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
38766         if (tableNameBuilder_ == null) {
38767           tableName_ = builderForValue.build();
38768           onChanged();
38769         } else {
38770           tableNameBuilder_.setMessage(builderForValue.build());
38771         }
38772         bitField0_ |= 0x00000001;
38773         return this;
38774       }
38775       /**
38776        * <code>required .TableName table_name = 1;</code>
38777        */
mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)38778       public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
38779         if (tableNameBuilder_ == null) {
38780           if (((bitField0_ & 0x00000001) == 0x00000001) &&
38781               tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
38782             tableName_ =
38783               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
38784           } else {
38785             tableName_ = value;
38786           }
38787           onChanged();
38788         } else {
38789           tableNameBuilder_.mergeFrom(value);
38790         }
38791         bitField0_ |= 0x00000001;
38792         return this;
38793       }
38794       /**
38795        * <code>required .TableName table_name = 1;</code>
38796        */
clearTableName()38797       public Builder clearTableName() {
38798         if (tableNameBuilder_ == null) {
38799           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
38800           onChanged();
38801         } else {
38802           tableNameBuilder_.clear();
38803         }
38804         bitField0_ = (bitField0_ & ~0x00000001);
38805         return this;
38806       }
38807       /**
38808        * <code>required .TableName table_name = 1;</code>
38809        */
getTableNameBuilder()38810       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
38811         bitField0_ |= 0x00000001;
38812         onChanged();
38813         return getTableNameFieldBuilder().getBuilder();
38814       }
38815       /**
38816        * <code>required .TableName table_name = 1;</code>
38817        */
getTableNameOrBuilder()38818       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
38819         if (tableNameBuilder_ != null) {
38820           return tableNameBuilder_.getMessageOrBuilder();
38821         } else {
38822           return tableName_;
38823         }
38824       }
38825       /**
38826        * <code>required .TableName table_name = 1;</code>
38827        */
38828       private com.google.protobuf.SingleFieldBuilder<
38829           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder()38830           getTableNameFieldBuilder() {
38831         if (tableNameBuilder_ == null) {
38832           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
38833               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
38834                   tableName_,
38835                   getParentForChildren(),
38836                   isClean());
38837           tableName_ = null;
38838         }
38839         return tableNameBuilder_;
38840       }
38841 
38842       // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusRequest)
38843     }
38844 
    // Eagerly creates the shared default instance with all fields at their
    // proto defaults.
    static {
      defaultInstance = new GetSchemaAlterStatusRequest(true);
      defaultInstance.initFields();
    }
38849 
38850     // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusRequest)
38851   }
38852 
  // Read-only view shared by GetSchemaAlterStatusResponse and its Builder.
  public interface GetSchemaAlterStatusResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional uint32 yet_to_update_regions = 1;
    /**
     * <code>optional uint32 yet_to_update_regions = 1;</code>
     */
    boolean hasYetToUpdateRegions();
    /**
     * <code>optional uint32 yet_to_update_regions = 1;</code>
     */
    int getYetToUpdateRegions();

    // optional uint32 total_regions = 2;
    /**
     * <code>optional uint32 total_regions = 2;</code>
     */
    boolean hasTotalRegions();
    /**
     * <code>optional uint32 total_regions = 2;</code>
     */
    int getTotalRegions();
  }
38876   /**
38877    * Protobuf type {@code GetSchemaAlterStatusResponse}
38878    */
38879   public static final class GetSchemaAlterStatusResponse extends
38880       com.google.protobuf.GeneratedMessage
38881       implements GetSchemaAlterStatusResponseOrBuilder {
    // Use GetSchemaAlterStatusResponse.newBuilder() to construct.
    private GetSchemaAlterStatusResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the static default instance.
    private GetSchemaAlterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
38888 
    // Shared immutable default instance, created in the static initializer.
    private static final GetSchemaAlterStatusResponse defaultInstance;
    public static GetSchemaAlterStatusResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetSchemaAlterStatusResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
38897 
    // Fields present on the wire but not in this message's schema; preserved
    // for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetSchemaAlterStatusResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)38904     private GetSchemaAlterStatusResponse(
38905         com.google.protobuf.CodedInputStream input,
38906         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
38907         throws com.google.protobuf.InvalidProtocolBufferException {
38908       initFields();
38909       int mutable_bitField0_ = 0;
38910       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
38911           com.google.protobuf.UnknownFieldSet.newBuilder();
38912       try {
38913         boolean done = false;
38914         while (!done) {
38915           int tag = input.readTag();
38916           switch (tag) {
38917             case 0:
38918               done = true;
38919               break;
38920             default: {
38921               if (!parseUnknownField(input, unknownFields,
38922                                      extensionRegistry, tag)) {
38923                 done = true;
38924               }
38925               break;
38926             }
38927             case 8: {
38928               bitField0_ |= 0x00000001;
38929               yetToUpdateRegions_ = input.readUInt32();
38930               break;
38931             }
38932             case 16: {
38933               bitField0_ |= 0x00000002;
38934               totalRegions_ = input.readUInt32();
38935               break;
38936             }
38937           }
38938         }
38939       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
38940         throw e.setUnfinishedMessage(this);
38941       } catch (java.io.IOException e) {
38942         throw new com.google.protobuf.InvalidProtocolBufferException(
38943             e.getMessage()).setUnfinishedMessage(this);
38944       } finally {
38945         this.unknownFields = unknownFields.build();
38946         makeExtensionsImmutable();
38947       }
38948     }
    // Reflection support: descriptor and field-accessor table for this type,
    // defined at the MasterProtos file level.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.Builder.class);
    }
38960 
    // Shared parser that delegates to the parsing constructor. (Non-final by
    // protoc-2.x convention; not reassigned by generated code.)
    public static com.google.protobuf.Parser<GetSchemaAlterStatusResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetSchemaAlterStatusResponse>() {
      public GetSchemaAlterStatusResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetSchemaAlterStatusResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetSchemaAlterStatusResponse> getParserForType() {
      return PARSER;
    }
38975 
    // Presence bits: bit 0 = yet_to_update_regions set, bit 1 = total_regions set.
    private int bitField0_;
    // optional uint32 yet_to_update_regions = 1;
    public static final int YET_TO_UPDATE_REGIONS_FIELD_NUMBER = 1;
    private int yetToUpdateRegions_;
    /**
     * <code>optional uint32 yet_to_update_regions = 1;</code>
     */
    public boolean hasYetToUpdateRegions() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional uint32 yet_to_update_regions = 1;</code>
     */
    public int getYetToUpdateRegions() {
      return yetToUpdateRegions_;
    }

    // optional uint32 total_regions = 2;
    public static final int TOTAL_REGIONS_FIELD_NUMBER = 2;
    private int totalRegions_;
    /**
     * <code>optional uint32 total_regions = 2;</code>
     */
    public boolean hasTotalRegions() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint32 total_regions = 2;</code>
     */
    public int getTotalRegions() {
      return totalRegions_;
    }
39008 
initFields()39009     private void initFields() {
39010       yetToUpdateRegions_ = 0;
39011       totalRegions_ = 0;
39012     }
39013     private byte memoizedIsInitialized = -1;
isInitialized()39014     public final boolean isInitialized() {
39015       byte isInitialized = memoizedIsInitialized;
39016       if (isInitialized != -1) return isInitialized == 1;
39017 
39018       memoizedIsInitialized = 1;
39019       return true;
39020     }
39021 
writeTo(com.google.protobuf.CodedOutputStream output)39022     public void writeTo(com.google.protobuf.CodedOutputStream output)
39023                         throws java.io.IOException {
39024       getSerializedSize();
39025       if (((bitField0_ & 0x00000001) == 0x00000001)) {
39026         output.writeUInt32(1, yetToUpdateRegions_);
39027       }
39028       if (((bitField0_ & 0x00000002) == 0x00000002)) {
39029         output.writeUInt32(2, totalRegions_);
39030       }
39031       getUnknownFields().writeTo(output);
39032     }
39033 
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, yetToUpdateRegions_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, totalRegions_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
39052 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's
    // serialized-form proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
39059 
    // Structural equality over presence bits, field values, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) obj;

      boolean result = true;
      result = result && (hasYetToUpdateRegions() == other.hasYetToUpdateRegions());
      if (hasYetToUpdateRegions()) {
        result = result && (getYetToUpdateRegions()
            == other.getYetToUpdateRegions());
      }
      result = result && (hasTotalRegions() == other.hasTotalRegions());
      if (hasTotalRegions()) {
        result = result && (getTotalRegions()
            == other.getTotalRegions());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
39085 
    // Cached hash; 0 means not yet computed (a true hash of 0 is simply
    // recomputed each call).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasYetToUpdateRegions()) {
        hash = (37 * hash) + YET_TO_UPDATE_REGIONS_FIELD_NUMBER;
        hash = (53 * hash) + getYetToUpdateRegions();
      }
      if (hasTotalRegions()) {
        hash = (37 * hash) + TOTAL_REGIONS_FIELD_NUMBER;
        hash = (53 * hash) + getTotalRegions();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
39106 
parseFrom( com.google.protobuf.ByteString data)39107     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(
39108         com.google.protobuf.ByteString data)
39109         throws com.google.protobuf.InvalidProtocolBufferException {
39110       return PARSER.parseFrom(data);
39111     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39112     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(
39113         com.google.protobuf.ByteString data,
39114         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39115         throws com.google.protobuf.InvalidProtocolBufferException {
39116       return PARSER.parseFrom(data, extensionRegistry);
39117     }
parseFrom(byte[] data)39118     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(byte[] data)
39119         throws com.google.protobuf.InvalidProtocolBufferException {
39120       return PARSER.parseFrom(data);
39121     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39122     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(
39123         byte[] data,
39124         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39125         throws com.google.protobuf.InvalidProtocolBufferException {
39126       return PARSER.parseFrom(data, extensionRegistry);
39127     }
parseFrom(java.io.InputStream input)39128     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(java.io.InputStream input)
39129         throws java.io.IOException {
39130       return PARSER.parseFrom(input);
39131     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39132     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(
39133         java.io.InputStream input,
39134         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39135         throws java.io.IOException {
39136       return PARSER.parseFrom(input, extensionRegistry);
39137     }
parseDelimitedFrom(java.io.InputStream input)39138     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(java.io.InputStream input)
39139         throws java.io.IOException {
39140       return PARSER.parseDelimitedFrom(input);
39141     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39142     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseDelimitedFrom(
39143         java.io.InputStream input,
39144         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39145         throws java.io.IOException {
39146       return PARSER.parseDelimitedFrom(input, extensionRegistry);
39147     }
parseFrom( com.google.protobuf.CodedInputStream input)39148     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(
39149         com.google.protobuf.CodedInputStream input)
39150         throws java.io.IOException {
39151       return PARSER.parseFrom(input);
39152     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39153     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parseFrom(
39154         com.google.protobuf.CodedInputStream input,
39155         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39156         throws java.io.IOException {
39157       return PARSER.parseFrom(input, extensionRegistry);
39158     }
39159 
newBuilder()39160     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()39161     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse prototype)39162     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse prototype) {
39163       return newBuilder().mergeFrom(prototype);
39164     }
toBuilder()39165     public Builder toBuilder() { return newBuilder(this); }
39166 
    // Framework hook: creates a builder attached to a parent so nested-builder
    // change notifications propagate (used by GeneratedMessage internals).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
39173     /**
39174      * Protobuf type {@code GetSchemaAlterStatusResponse}
39175      */
39176     public static final class Builder extends
39177         com.google.protobuf.GeneratedMessage.Builder<Builder>
39178        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponseOrBuilder {
39179       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()39180           getDescriptor() {
39181         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
39182       }
39183 
39184       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()39185           internalGetFieldAccessorTable() {
39186         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable
39187             .ensureFieldAccessorsInitialized(
39188                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.Builder.class);
39189       }
39190 
39191       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.newBuilder()
Builder()39192       private Builder() {
39193         maybeForceBuilderInitialization();
39194       }
39195 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)39196       private Builder(
39197           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
39198         super(parent);
39199         maybeForceBuilderInitialization();
39200       }
maybeForceBuilderInitialization()39201       private void maybeForceBuilderInitialization() {
39202         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
39203         }
39204       }
create()39205       private static Builder create() {
39206         return new Builder();
39207       }
39208 
clear()39209       public Builder clear() {
39210         super.clear();
39211         yetToUpdateRegions_ = 0;
39212         bitField0_ = (bitField0_ & ~0x00000001);
39213         totalRegions_ = 0;
39214         bitField0_ = (bitField0_ & ~0x00000002);
39215         return this;
39216       }
39217 
clone()39218       public Builder clone() {
39219         return create().mergeFrom(buildPartial());
39220       }
39221 
39222       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()39223           getDescriptorForType() {
39224         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetSchemaAlterStatusResponse_descriptor;
39225       }
39226 
getDefaultInstanceForType()39227       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getDefaultInstanceForType() {
39228         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance();
39229       }
39230 
build()39231       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse build() {
39232         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = buildPartial();
39233         if (!result.isInitialized()) {
39234           throw newUninitializedMessageException(result);
39235         }
39236         return result;
39237       }
39238 
buildPartial()39239       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse buildPartial() {
39240         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse(this);
39241         int from_bitField0_ = bitField0_;
39242         int to_bitField0_ = 0;
39243         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
39244           to_bitField0_ |= 0x00000001;
39245         }
39246         result.yetToUpdateRegions_ = yetToUpdateRegions_;
39247         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
39248           to_bitField0_ |= 0x00000002;
39249         }
39250         result.totalRegions_ = totalRegions_;
39251         result.bitField0_ = to_bitField0_;
39252         onBuilt();
39253         return result;
39254       }
39255 
mergeFrom(com.google.protobuf.Message other)39256       public Builder mergeFrom(com.google.protobuf.Message other) {
39257         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) {
39258           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse)other);
39259         } else {
39260           super.mergeFrom(other);
39261           return this;
39262         }
39263       }
39264 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other)39265       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse other) {
39266         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance()) return this;
39267         if (other.hasYetToUpdateRegions()) {
39268           setYetToUpdateRegions(other.getYetToUpdateRegions());
39269         }
39270         if (other.hasTotalRegions()) {
39271           setTotalRegions(other.getTotalRegions());
39272         }
39273         this.mergeUnknownFields(other.getUnknownFields());
39274         return this;
39275       }
39276 
isInitialized()39277       public final boolean isInitialized() {
39278         return true;
39279       }
39280 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39281       public Builder mergeFrom(
39282           com.google.protobuf.CodedInputStream input,
39283           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39284           throws java.io.IOException {
39285         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse parsedMessage = null;
39286         try {
39287           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
39288         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
39289           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) e.getUnfinishedMessage();
39290           throw e;
39291         } finally {
39292           if (parsedMessage != null) {
39293             mergeFrom(parsedMessage);
39294           }
39295         }
39296         return this;
39297       }
39298       private int bitField0_;
39299 
39300       // optional uint32 yet_to_update_regions = 1;
39301       private int yetToUpdateRegions_ ;
39302       /**
39303        * <code>optional uint32 yet_to_update_regions = 1;</code>
39304        */
hasYetToUpdateRegions()39305       public boolean hasYetToUpdateRegions() {
39306         return ((bitField0_ & 0x00000001) == 0x00000001);
39307       }
39308       /**
39309        * <code>optional uint32 yet_to_update_regions = 1;</code>
39310        */
getYetToUpdateRegions()39311       public int getYetToUpdateRegions() {
39312         return yetToUpdateRegions_;
39313       }
39314       /**
39315        * <code>optional uint32 yet_to_update_regions = 1;</code>
39316        */
setYetToUpdateRegions(int value)39317       public Builder setYetToUpdateRegions(int value) {
39318         bitField0_ |= 0x00000001;
39319         yetToUpdateRegions_ = value;
39320         onChanged();
39321         return this;
39322       }
39323       /**
39324        * <code>optional uint32 yet_to_update_regions = 1;</code>
39325        */
clearYetToUpdateRegions()39326       public Builder clearYetToUpdateRegions() {
39327         bitField0_ = (bitField0_ & ~0x00000001);
39328         yetToUpdateRegions_ = 0;
39329         onChanged();
39330         return this;
39331       }
39332 
39333       // optional uint32 total_regions = 2;
39334       private int totalRegions_ ;
39335       /**
39336        * <code>optional uint32 total_regions = 2;</code>
39337        */
hasTotalRegions()39338       public boolean hasTotalRegions() {
39339         return ((bitField0_ & 0x00000002) == 0x00000002);
39340       }
39341       /**
39342        * <code>optional uint32 total_regions = 2;</code>
39343        */
getTotalRegions()39344       public int getTotalRegions() {
39345         return totalRegions_;
39346       }
39347       /**
39348        * <code>optional uint32 total_regions = 2;</code>
39349        */
setTotalRegions(int value)39350       public Builder setTotalRegions(int value) {
39351         bitField0_ |= 0x00000002;
39352         totalRegions_ = value;
39353         onChanged();
39354         return this;
39355       }
39356       /**
39357        * <code>optional uint32 total_regions = 2;</code>
39358        */
clearTotalRegions()39359       public Builder clearTotalRegions() {
39360         bitField0_ = (bitField0_ & ~0x00000002);
39361         totalRegions_ = 0;
39362         onChanged();
39363         return this;
39364       }
39365 
39366       // @@protoc_insertion_point(builder_scope:GetSchemaAlterStatusResponse)
39367     }
39368 
    // Eagerly builds the singleton default instance (noInit constructor),
    // then populates its fields with proto defaults via initFields().
    static {
      defaultInstance = new GetSchemaAlterStatusResponse(true);
      defaultInstance.initFields();
    }
39373 
39374     // @@protoc_insertion_point(class_scope:GetSchemaAlterStatusResponse)
39375   }
39376 
  /**
   * Read accessors shared by {@code GetTableDescriptorsRequest} and its
   * Builder: a repeated {@code TableName} field plus optional regex,
   * include_sys_tables, and namespace filters.
   */
  public interface GetTableDescriptorsRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .TableName table_names = 1;
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>
        getTableNamesList();
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index);
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    int getTableNamesCount();
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
        getTableNamesOrBuilderList();
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder(
        int index);

    // optional string regex = 2;
    /**
     * <code>optional string regex = 2;</code>
     */
    boolean hasRegex();
    /**
     * <code>optional string regex = 2;</code>
     */
    java.lang.String getRegex();
    /**
     * <code>optional string regex = 2;</code>
     */
    com.google.protobuf.ByteString
        getRegexBytes();

    // optional bool include_sys_tables = 3 [default = false];
    /**
     * <code>optional bool include_sys_tables = 3 [default = false];</code>
     */
    boolean hasIncludeSysTables();
    /**
     * <code>optional bool include_sys_tables = 3 [default = false];</code>
     */
    boolean getIncludeSysTables();

    // optional string namespace = 4;
    /**
     * <code>optional string namespace = 4;</code>
     */
    boolean hasNamespace();
    /**
     * <code>optional string namespace = 4;</code>
     */
    java.lang.String getNamespace();
    /**
     * <code>optional string namespace = 4;</code>
     */
    com.google.protobuf.ByteString
        getNamespaceBytes();
  }
39445   /**
39446    * Protobuf type {@code GetTableDescriptorsRequest}
39447    */
39448   public static final class GetTableDescriptorsRequest extends
39449       com.google.protobuf.GeneratedMessage
39450       implements GetTableDescriptorsRequestOrBuilder {
    // Use GetTableDescriptorsRequest.newBuilder() to construct.
    private GetTableDescriptorsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the singleton default instance;
    // fields are populated afterwards by initFields() in the static block.
    private GetTableDescriptorsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable default instance (all fields at proto defaults).
    private static final GetTableDescriptorsRequest defaultInstance;
    public static GetTableDescriptorsRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetTableDescriptorsRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this (older) schema does not recognize;
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetTableDescriptorsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39473     private GetTableDescriptorsRequest(
39474         com.google.protobuf.CodedInputStream input,
39475         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39476         throws com.google.protobuf.InvalidProtocolBufferException {
39477       initFields();
39478       int mutable_bitField0_ = 0;
39479       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
39480           com.google.protobuf.UnknownFieldSet.newBuilder();
39481       try {
39482         boolean done = false;
39483         while (!done) {
39484           int tag = input.readTag();
39485           switch (tag) {
39486             case 0:
39487               done = true;
39488               break;
39489             default: {
39490               if (!parseUnknownField(input, unknownFields,
39491                                      extensionRegistry, tag)) {
39492                 done = true;
39493               }
39494               break;
39495             }
39496             case 10: {
39497               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
39498                 tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
39499                 mutable_bitField0_ |= 0x00000001;
39500               }
39501               tableNames_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
39502               break;
39503             }
39504             case 18: {
39505               bitField0_ |= 0x00000001;
39506               regex_ = input.readBytes();
39507               break;
39508             }
39509             case 24: {
39510               bitField0_ |= 0x00000002;
39511               includeSysTables_ = input.readBool();
39512               break;
39513             }
39514             case 34: {
39515               bitField0_ |= 0x00000004;
39516               namespace_ = input.readBytes();
39517               break;
39518             }
39519           }
39520         }
39521       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
39522         throw e.setUnfinishedMessage(this);
39523       } catch (java.io.IOException e) {
39524         throw new com.google.protobuf.InvalidProtocolBufferException(
39525             e.getMessage()).setUnfinishedMessage(this);
39526       } finally {
39527         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
39528           tableNames_ = java.util.Collections.unmodifiableList(tableNames_);
39529         }
39530         this.unknownFields = unknownFields.build();
39531         makeExtensionsImmutable();
39532       }
39533     }
    // Reflection plumbing: descriptor and field-accessor table are defined
    // at the MasterProtos outer-class level (not visible in this chunk).
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.Builder.class);
    }

    // Stateless parser singleton; each call delegates to the wire-parsing
    // constructor above.
    public static com.google.protobuf.Parser<GetTableDescriptorsRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetTableDescriptorsRequest>() {
      public GetTableDescriptorsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTableDescriptorsRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTableDescriptorsRequest> getParserForType() {
      return PARSER;
    }
39560 
    // Presence bits for the optional fields:
    // 0x1 = regex, 0x2 = include_sys_tables, 0x4 = namespace.
    // (The repeated table_names field has no presence bit; emptiness of the
    // list is its state.)
    private int bitField0_;
    // repeated .TableName table_names = 1;
    public static final int TABLE_NAMES_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_;
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() {
      return tableNames_;
    }
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
        getTableNamesOrBuilderList() {
      return tableNames_;
    }
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    public int getTableNamesCount() {
      return tableNames_.size();
    }
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) {
      return tableNames_.get(index);
    }
    /**
     * <code>repeated .TableName table_names = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder(
        int index) {
      return tableNames_.get(index);
    }

    // optional string regex = 2;
    public static final int REGEX_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString; converted lazily in each
    // direction and cached when the bytes are valid UTF-8.
    private java.lang.Object regex_;
    /**
     * <code>optional string regex = 2;</code>
     */
    public boolean hasRegex() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string regex = 2;</code>
     */
    public java.lang.String getRegex() {
      java.lang.Object ref = regex_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          regex_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string regex = 2;</code>
     */
    public com.google.protobuf.ByteString
        getRegexBytes() {
      java.lang.Object ref = regex_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        regex_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional bool include_sys_tables = 3 [default = false];
    public static final int INCLUDE_SYS_TABLES_FIELD_NUMBER = 3;
    private boolean includeSysTables_;
    /**
     * <code>optional bool include_sys_tables = 3 [default = false];</code>
     */
    public boolean hasIncludeSysTables() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool include_sys_tables = 3 [default = false];</code>
     */
    public boolean getIncludeSysTables() {
      return includeSysTables_;
    }

    // optional string namespace = 4;
    public static final int NAMESPACE_FIELD_NUMBER = 4;
    // Same lazy String/ByteString representation as regex_ above.
    private java.lang.Object namespace_;
    /**
     * <code>optional string namespace = 4;</code>
     */
    public boolean hasNamespace() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string namespace = 4;</code>
     */
    public java.lang.String getNamespace() {
      java.lang.Object ref = namespace_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          namespace_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string namespace = 4;</code>
     */
    public com.google.protobuf.ByteString
        getNamespaceBytes() {
      java.lang.Object ref = namespace_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        namespace_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
39699 
initFields()39700     private void initFields() {
39701       tableNames_ = java.util.Collections.emptyList();
39702       regex_ = "";
39703       includeSysTables_ = false;
39704       namespace_ = "";
39705     }
39706     private byte memoizedIsInitialized = -1;
isInitialized()39707     public final boolean isInitialized() {
39708       byte isInitialized = memoizedIsInitialized;
39709       if (isInitialized != -1) return isInitialized == 1;
39710 
39711       for (int i = 0; i < getTableNamesCount(); i++) {
39712         if (!getTableNames(i).isInitialized()) {
39713           memoizedIsInitialized = 0;
39714           return false;
39715         }
39716       }
39717       memoizedIsInitialized = 1;
39718       return true;
39719     }
39720 
writeTo(com.google.protobuf.CodedOutputStream output)39721     public void writeTo(com.google.protobuf.CodedOutputStream output)
39722                         throws java.io.IOException {
39723       getSerializedSize();
39724       for (int i = 0; i < tableNames_.size(); i++) {
39725         output.writeMessage(1, tableNames_.get(i));
39726       }
39727       if (((bitField0_ & 0x00000001) == 0x00000001)) {
39728         output.writeBytes(2, getRegexBytes());
39729       }
39730       if (((bitField0_ & 0x00000002) == 0x00000002)) {
39731         output.writeBool(3, includeSysTables_);
39732       }
39733       if (((bitField0_ & 0x00000004) == 0x00000004)) {
39734         output.writeBytes(4, getNamespaceBytes());
39735       }
39736       getUnknownFields().writeTo(output);
39737     }
39738 
39739     private int memoizedSerializedSize = -1;
getSerializedSize()39740     public int getSerializedSize() {
39741       int size = memoizedSerializedSize;
39742       if (size != -1) return size;
39743 
39744       size = 0;
39745       for (int i = 0; i < tableNames_.size(); i++) {
39746         size += com.google.protobuf.CodedOutputStream
39747           .computeMessageSize(1, tableNames_.get(i));
39748       }
39749       if (((bitField0_ & 0x00000001) == 0x00000001)) {
39750         size += com.google.protobuf.CodedOutputStream
39751           .computeBytesSize(2, getRegexBytes());
39752       }
39753       if (((bitField0_ & 0x00000002) == 0x00000002)) {
39754         size += com.google.protobuf.CodedOutputStream
39755           .computeBoolSize(3, includeSysTables_);
39756       }
39757       if (((bitField0_ & 0x00000004) == 0x00000004)) {
39758         size += com.google.protobuf.CodedOutputStream
39759           .computeBytesSize(4, getNamespaceBytes());
39760       }
39761       size += getUnknownFields().getSerializedSize();
39762       memoizedSerializedSize = size;
39763       return size;
39764     }
39765 
39766     private static final long serialVersionUID = 0L;
39767     @java.lang.Override
writeReplace()39768     protected java.lang.Object writeReplace()
39769         throws java.io.ObjectStreamException {
39770       return super.writeReplace();
39771     }
39772 
    // Field-wise equality: two messages are equal when each field has the
    // same presence and, if present, the same value, and their unknown
    // field sets match. Non-message types fall back to super.equals.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) obj;

      boolean result = true;
      result = result && getTableNamesList()
          .equals(other.getTableNamesList());
      result = result && (hasRegex() == other.hasRegex());
      if (hasRegex()) {
        result = result && getRegex()
            .equals(other.getRegex());
      }
      result = result && (hasIncludeSysTables() == other.hasIncludeSysTables());
      if (hasIncludeSysTables()) {
        result = result && (getIncludeSysTables()
            == other.getIncludeSysTables());
      }
      result = result && (hasNamespace() == other.hasNamespace());
      if (hasNamespace()) {
        result = result && getNamespace()
            .equals(other.getNamespace());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
39805 
39806     private int memoizedHashCode = 0;
39807     @java.lang.Override
hashCode()39808     public int hashCode() {
39809       if (memoizedHashCode != 0) {
39810         return memoizedHashCode;
39811       }
39812       int hash = 41;
39813       hash = (19 * hash) + getDescriptorForType().hashCode();
39814       if (getTableNamesCount() > 0) {
39815         hash = (37 * hash) + TABLE_NAMES_FIELD_NUMBER;
39816         hash = (53 * hash) + getTableNamesList().hashCode();
39817       }
39818       if (hasRegex()) {
39819         hash = (37 * hash) + REGEX_FIELD_NUMBER;
39820         hash = (53 * hash) + getRegex().hashCode();
39821       }
39822       if (hasIncludeSysTables()) {
39823         hash = (37 * hash) + INCLUDE_SYS_TABLES_FIELD_NUMBER;
39824         hash = (53 * hash) + hashBoolean(getIncludeSysTables());
39825       }
39826       if (hasNamespace()) {
39827         hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
39828         hash = (53 * hash) + getNamespace().hashCode();
39829       }
39830       hash = (29 * hash) + getUnknownFields().hashCode();
39831       memoizedHashCode = hash;
39832       return hash;
39833     }
39834 
parseFrom( com.google.protobuf.ByteString data)39835     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(
39836         com.google.protobuf.ByteString data)
39837         throws com.google.protobuf.InvalidProtocolBufferException {
39838       return PARSER.parseFrom(data);
39839     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39840     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(
39841         com.google.protobuf.ByteString data,
39842         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39843         throws com.google.protobuf.InvalidProtocolBufferException {
39844       return PARSER.parseFrom(data, extensionRegistry);
39845     }
parseFrom(byte[] data)39846     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(byte[] data)
39847         throws com.google.protobuf.InvalidProtocolBufferException {
39848       return PARSER.parseFrom(data);
39849     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39850     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(
39851         byte[] data,
39852         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39853         throws com.google.protobuf.InvalidProtocolBufferException {
39854       return PARSER.parseFrom(data, extensionRegistry);
39855     }
parseFrom(java.io.InputStream input)39856     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(java.io.InputStream input)
39857         throws java.io.IOException {
39858       return PARSER.parseFrom(input);
39859     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39860     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(
39861         java.io.InputStream input,
39862         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39863         throws java.io.IOException {
39864       return PARSER.parseFrom(input, extensionRegistry);
39865     }
parseDelimitedFrom(java.io.InputStream input)39866     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom(java.io.InputStream input)
39867         throws java.io.IOException {
39868       return PARSER.parseDelimitedFrom(input);
39869     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39870     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseDelimitedFrom(
39871         java.io.InputStream input,
39872         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39873         throws java.io.IOException {
39874       return PARSER.parseDelimitedFrom(input, extensionRegistry);
39875     }
parseFrom( com.google.protobuf.CodedInputStream input)39876     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(
39877         com.google.protobuf.CodedInputStream input)
39878         throws java.io.IOException {
39879       return PARSER.parseFrom(input);
39880     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)39881     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parseFrom(
39882         com.google.protobuf.CodedInputStream input,
39883         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
39884         throws java.io.IOException {
39885       return PARSER.parseFrom(input, extensionRegistry);
39886     }
39887 
newBuilder()39888     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()39889     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest prototype)39890     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest prototype) {
39891       return newBuilder().mergeFrom(prototype);
39892     }
toBuilder()39893     public Builder toBuilder() { return newBuilder(this); }
39894 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parented builder: change notifications propagate to the enclosing
      // builder (used when this message is a sub-message field elsewhere).
      Builder builder = new Builder(parent);
      return builder;
    }
39901     /**
39902      * Protobuf type {@code GetTableDescriptorsRequest}
39903      */
39904     public static final class Builder extends
39905         com.google.protobuf.GeneratedMessage.Builder<Builder>
39906        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequestOrBuilder {
      // Reflection plumbing: descriptor for GetTableDescriptorsRequest and the
      // accessor table that binds its fields to these generated methods.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.Builder.class);
      }
39918 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parented variant: hooks this builder into an enclosing builder so
      // onChanged() notifications bubble up.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builder when the runtime flag
      // alwaysUseFieldBuilders is set (used by nested-builder support).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNamesFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
39937 
clear()39938       public Builder clear() {
39939         super.clear();
39940         if (tableNamesBuilder_ == null) {
39941           tableNames_ = java.util.Collections.emptyList();
39942           bitField0_ = (bitField0_ & ~0x00000001);
39943         } else {
39944           tableNamesBuilder_.clear();
39945         }
39946         regex_ = "";
39947         bitField0_ = (bitField0_ & ~0x00000002);
39948         includeSysTables_ = false;
39949         bitField0_ = (bitField0_ & ~0x00000004);
39950         namespace_ = "";
39951         bitField0_ = (bitField0_ & ~0x00000008);
39952         return this;
39953       }
39954 
clone()39955       public Builder clone() {
39956         return create().mergeFrom(buildPartial());
39957       }
39958 
39959       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()39960           getDescriptorForType() {
39961         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsRequest_descriptor;
39962       }
39963 
getDefaultInstanceForType()39964       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest getDefaultInstanceForType() {
39965         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance();
39966       }
39967 
build()39968       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest build() {
39969         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = buildPartial();
39970         if (!result.isInitialized()) {
39971           throw newUninitializedMessageException(result);
39972         }
39973         return result;
39974       }
39975 
buildPartial()39976       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest buildPartial() {
39977         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest(this);
39978         int from_bitField0_ = bitField0_;
39979         int to_bitField0_ = 0;
39980         if (tableNamesBuilder_ == null) {
39981           if (((bitField0_ & 0x00000001) == 0x00000001)) {
39982             tableNames_ = java.util.Collections.unmodifiableList(tableNames_);
39983             bitField0_ = (bitField0_ & ~0x00000001);
39984           }
39985           result.tableNames_ = tableNames_;
39986         } else {
39987           result.tableNames_ = tableNamesBuilder_.build();
39988         }
39989         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
39990           to_bitField0_ |= 0x00000001;
39991         }
39992         result.regex_ = regex_;
39993         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
39994           to_bitField0_ |= 0x00000002;
39995         }
39996         result.includeSysTables_ = includeSysTables_;
39997         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
39998           to_bitField0_ |= 0x00000004;
39999         }
40000         result.namespace_ = namespace_;
40001         result.bitField0_ = to_bitField0_;
40002         onBuilt();
40003         return result;
40004       }
40005 
mergeFrom(com.google.protobuf.Message other)40006       public Builder mergeFrom(com.google.protobuf.Message other) {
40007         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) {
40008           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)other);
40009         } else {
40010           super.mergeFrom(other);
40011           return this;
40012         }
40013       }
40014 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other)40015       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest other) {
40016         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance()) return this;
40017         if (tableNamesBuilder_ == null) {
40018           if (!other.tableNames_.isEmpty()) {
40019             if (tableNames_.isEmpty()) {
40020               tableNames_ = other.tableNames_;
40021               bitField0_ = (bitField0_ & ~0x00000001);
40022             } else {
40023               ensureTableNamesIsMutable();
40024               tableNames_.addAll(other.tableNames_);
40025             }
40026             onChanged();
40027           }
40028         } else {
40029           if (!other.tableNames_.isEmpty()) {
40030             if (tableNamesBuilder_.isEmpty()) {
40031               tableNamesBuilder_.dispose();
40032               tableNamesBuilder_ = null;
40033               tableNames_ = other.tableNames_;
40034               bitField0_ = (bitField0_ & ~0x00000001);
40035               tableNamesBuilder_ =
40036                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
40037                    getTableNamesFieldBuilder() : null;
40038             } else {
40039               tableNamesBuilder_.addAllMessages(other.tableNames_);
40040             }
40041           }
40042         }
40043         if (other.hasRegex()) {
40044           bitField0_ |= 0x00000002;
40045           regex_ = other.regex_;
40046           onChanged();
40047         }
40048         if (other.hasIncludeSysTables()) {
40049           setIncludeSysTables(other.getIncludeSysTables());
40050         }
40051         if (other.hasNamespace()) {
40052           bitField0_ |= 0x00000008;
40053           namespace_ = other.namespace_;
40054           onChanged();
40055         }
40056         this.mergeUnknownFields(other.getUnknownFields());
40057         return this;
40058       }
40059 
isInitialized()40060       public final boolean isInitialized() {
40061         for (int i = 0; i < getTableNamesCount(); i++) {
40062           if (!getTableNames(i).isInitialized()) {
40063 
40064             return false;
40065           }
40066         }
40067         return true;
40068       }
40069 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)40070       public Builder mergeFrom(
40071           com.google.protobuf.CodedInputStream input,
40072           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
40073           throws java.io.IOException {
40074         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest parsedMessage = null;
40075         try {
40076           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
40077         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
40078           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest) e.getUnfinishedMessage();
40079           throw e;
40080         } finally {
40081           if (parsedMessage != null) {
40082             mergeFrom(parsedMessage);
40083           }
40084         }
40085         return this;
40086       }
      // Presence/mutability bits for this builder (see clear() for layout).
      private int bitField0_;

      // repeated .TableName table_names = 1;
      // Backing list; may alias an immutable list from a merged message until
      // ensureTableNamesIsMutable() copies it (copy-on-write, tracked by bit 0x1).
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_ =
        java.util.Collections.emptyList();
      private void ensureTableNamesIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableNames_);
          bitField0_ |= 0x00000001;
         }
      }

      // Lazily-created nested-builder support; when non-null it owns the
      // field's state and tableNames_ is ignored.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNamesBuilder_;
40101 
40102       /**
40103        * <code>repeated .TableName table_names = 1;</code>
40104        */
getTableNamesList()40105       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() {
40106         if (tableNamesBuilder_ == null) {
40107           return java.util.Collections.unmodifiableList(tableNames_);
40108         } else {
40109           return tableNamesBuilder_.getMessageList();
40110         }
40111       }
40112       /**
40113        * <code>repeated .TableName table_names = 1;</code>
40114        */
getTableNamesCount()40115       public int getTableNamesCount() {
40116         if (tableNamesBuilder_ == null) {
40117           return tableNames_.size();
40118         } else {
40119           return tableNamesBuilder_.getCount();
40120         }
40121       }
40122       /**
40123        * <code>repeated .TableName table_names = 1;</code>
40124        */
getTableNames(int index)40125       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) {
40126         if (tableNamesBuilder_ == null) {
40127           return tableNames_.get(index);
40128         } else {
40129           return tableNamesBuilder_.getMessage(index);
40130         }
40131       }
40132       /**
40133        * <code>repeated .TableName table_names = 1;</code>
40134        */
setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)40135       public Builder setTableNames(
40136           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
40137         if (tableNamesBuilder_ == null) {
40138           if (value == null) {
40139             throw new NullPointerException();
40140           }
40141           ensureTableNamesIsMutable();
40142           tableNames_.set(index, value);
40143           onChanged();
40144         } else {
40145           tableNamesBuilder_.setMessage(index, value);
40146         }
40147         return this;
40148       }
40149       /**
40150        * <code>repeated .TableName table_names = 1;</code>
40151        */
setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)40152       public Builder setTableNames(
40153           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
40154         if (tableNamesBuilder_ == null) {
40155           ensureTableNamesIsMutable();
40156           tableNames_.set(index, builderForValue.build());
40157           onChanged();
40158         } else {
40159           tableNamesBuilder_.setMessage(index, builderForValue.build());
40160         }
40161         return this;
40162       }
40163       /**
40164        * <code>repeated .TableName table_names = 1;</code>
40165        */
addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)40166       public Builder addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
40167         if (tableNamesBuilder_ == null) {
40168           if (value == null) {
40169             throw new NullPointerException();
40170           }
40171           ensureTableNamesIsMutable();
40172           tableNames_.add(value);
40173           onChanged();
40174         } else {
40175           tableNamesBuilder_.addMessage(value);
40176         }
40177         return this;
40178       }
40179       /**
40180        * <code>repeated .TableName table_names = 1;</code>
40181        */
addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)40182       public Builder addTableNames(
40183           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
40184         if (tableNamesBuilder_ == null) {
40185           if (value == null) {
40186             throw new NullPointerException();
40187           }
40188           ensureTableNamesIsMutable();
40189           tableNames_.add(index, value);
40190           onChanged();
40191         } else {
40192           tableNamesBuilder_.addMessage(index, value);
40193         }
40194         return this;
40195       }
40196       /**
40197        * <code>repeated .TableName table_names = 1;</code>
40198        */
addTableNames( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)40199       public Builder addTableNames(
40200           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
40201         if (tableNamesBuilder_ == null) {
40202           ensureTableNamesIsMutable();
40203           tableNames_.add(builderForValue.build());
40204           onChanged();
40205         } else {
40206           tableNamesBuilder_.addMessage(builderForValue.build());
40207         }
40208         return this;
40209       }
40210       /**
40211        * <code>repeated .TableName table_names = 1;</code>
40212        */
addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)40213       public Builder addTableNames(
40214           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
40215         if (tableNamesBuilder_ == null) {
40216           ensureTableNamesIsMutable();
40217           tableNames_.add(index, builderForValue.build());
40218           onChanged();
40219         } else {
40220           tableNamesBuilder_.addMessage(index, builderForValue.build());
40221         }
40222         return this;
40223       }
40224       /**
40225        * <code>repeated .TableName table_names = 1;</code>
40226        */
addAllTableNames( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values)40227       public Builder addAllTableNames(
40228           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) {
40229         if (tableNamesBuilder_ == null) {
40230           ensureTableNamesIsMutable();
40231           super.addAll(values, tableNames_);
40232           onChanged();
40233         } else {
40234           tableNamesBuilder_.addAllMessages(values);
40235         }
40236         return this;
40237       }
40238       /**
40239        * <code>repeated .TableName table_names = 1;</code>
40240        */
clearTableNames()40241       public Builder clearTableNames() {
40242         if (tableNamesBuilder_ == null) {
40243           tableNames_ = java.util.Collections.emptyList();
40244           bitField0_ = (bitField0_ & ~0x00000001);
40245           onChanged();
40246         } else {
40247           tableNamesBuilder_.clear();
40248         }
40249         return this;
40250       }
40251       /**
40252        * <code>repeated .TableName table_names = 1;</code>
40253        */
removeTableNames(int index)40254       public Builder removeTableNames(int index) {
40255         if (tableNamesBuilder_ == null) {
40256           ensureTableNamesIsMutable();
40257           tableNames_.remove(index);
40258           onChanged();
40259         } else {
40260           tableNamesBuilder_.remove(index);
40261         }
40262         return this;
40263       }
40264       /**
40265        * <code>repeated .TableName table_names = 1;</code>
40266        */
getTableNamesBuilder( int index)40267       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNamesBuilder(
40268           int index) {
40269         return getTableNamesFieldBuilder().getBuilder(index);
40270       }
40271       /**
40272        * <code>repeated .TableName table_names = 1;</code>
40273        */
getTableNamesOrBuilder( int index)40274       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder(
40275           int index) {
40276         if (tableNamesBuilder_ == null) {
40277           return tableNames_.get(index);  } else {
40278           return tableNamesBuilder_.getMessageOrBuilder(index);
40279         }
40280       }
40281       /**
40282        * <code>repeated .TableName table_names = 1;</code>
40283        */
40284       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNamesOrBuilderList()40285            getTableNamesOrBuilderList() {
40286         if (tableNamesBuilder_ != null) {
40287           return tableNamesBuilder_.getMessageOrBuilderList();
40288         } else {
40289           return java.util.Collections.unmodifiableList(tableNames_);
40290         }
40291       }
40292       /**
40293        * <code>repeated .TableName table_names = 1;</code>
40294        */
addTableNamesBuilder()40295       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder() {
40296         return getTableNamesFieldBuilder().addBuilder(
40297             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
40298       }
40299       /**
40300        * <code>repeated .TableName table_names = 1;</code>
40301        */
addTableNamesBuilder( int index)40302       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder(
40303           int index) {
40304         return getTableNamesFieldBuilder().addBuilder(
40305             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
40306       }
40307       /**
40308        * <code>repeated .TableName table_names = 1;</code>
40309        */
40310       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder>
getTableNamesBuilderList()40311            getTableNamesBuilderList() {
40312         return getTableNamesFieldBuilder().getBuilderList();
40313       }
      // Lazily creates the RepeatedFieldBuilder, handing it ownership of the
      // current list (tableNames_ is nulled afterwards so all access goes
      // through the builder from then on).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNamesFieldBuilder() {
        if (tableNamesBuilder_ == null) {
          tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableNames_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          tableNames_ = null;
        }
        return tableNamesBuilder_;
      }
40328 
      // optional string regex = 2;
      // Stored as Object: either a String or a ByteString; conversions are
      // cached in place by the accessors below.
      private java.lang.Object regex_ = "";
      /**
       * <code>optional string regex = 2;</code>
       */
      public boolean hasRegex() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string regex = 2;</code>
       * Decodes (and caches) the UTF-8 ByteString form on first access.
       */
      public java.lang.String getRegex() {
        java.lang.Object ref = regex_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          regex_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string regex = 2;</code>
       * Encodes (and caches) the String form as UTF-8 on first access.
       */
      public com.google.protobuf.ByteString
          getRegexBytes() {
        java.lang.Object ref = regex_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          regex_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string regex = 2;</code>
       * Sets the value and marks the field present; rejects null.
       */
      public Builder setRegex(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        regex_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string regex = 2;</code>
       * Clears the presence bit and restores the default-instance value.
       */
      public Builder clearRegex() {
        bitField0_ = (bitField0_ & ~0x00000002);
        regex_ = getDefaultInstance().getRegex();
        onChanged();
        return this;
      }
      /**
       * <code>optional string regex = 2;</code>
       * Sets the raw UTF-8 bytes directly; rejects null.
       */
      public Builder setRegexBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        regex_ = value;
        onChanged();
        return this;
      }
40402 
      // optional bool include_sys_tables = 3 [default = false];
      private boolean includeSysTables_ ;
      /**
       * <code>optional bool include_sys_tables = 3 [default = false];</code>
       */
      public boolean hasIncludeSysTables() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bool include_sys_tables = 3 [default = false];</code>
       */
      public boolean getIncludeSysTables() {
        return includeSysTables_;
      }
      /**
       * <code>optional bool include_sys_tables = 3 [default = false];</code>
       * Sets the value and marks the field present.
       */
      public Builder setIncludeSysTables(boolean value) {
        bitField0_ |= 0x00000004;
        includeSysTables_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool include_sys_tables = 3 [default = false];</code>
       * Clears presence and restores the proto default (false).
       */
      public Builder clearIncludeSysTables() {
        bitField0_ = (bitField0_ & ~0x00000004);
        includeSysTables_ = false;
        onChanged();
        return this;
      }
40435 
      // optional string namespace = 4;
      // Stored as Object: either a String or a ByteString; conversions are
      // cached in place by the accessors below.
      private java.lang.Object namespace_ = "";
      /**
       * <code>optional string namespace = 4;</code>
       */
      public boolean hasNamespace() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string namespace = 4;</code>
       * Decodes (and caches) the UTF-8 ByteString form on first access.
       */
      public java.lang.String getNamespace() {
        java.lang.Object ref = namespace_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          namespace_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string namespace = 4;</code>
       * Encodes (and caches) the String form as UTF-8 on first access.
       */
      public com.google.protobuf.ByteString
          getNamespaceBytes() {
        java.lang.Object ref = namespace_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          namespace_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string namespace = 4;</code>
       * Sets the value and marks the field present; rejects null.
       */
      public Builder setNamespace(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        namespace_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string namespace = 4;</code>
       * Clears the presence bit and restores the default-instance value.
       */
      public Builder clearNamespace() {
        bitField0_ = (bitField0_ & ~0x00000008);
        namespace_ = getDefaultInstance().getNamespace();
        onChanged();
        return this;
      }
40496       /**
40497        * <code>optional string namespace = 4;</code>
40498        */
setNamespaceBytes( com.google.protobuf.ByteString value)40499       public Builder setNamespaceBytes(
40500           com.google.protobuf.ByteString value) {
40501         if (value == null) {
40502     throw new NullPointerException();
40503   }
40504   bitField0_ |= 0x00000008;
40505         namespace_ = value;
40506         onChanged();
40507         return this;
40508       }
40509 
40510       // @@protoc_insertion_point(builder_scope:GetTableDescriptorsRequest)
40511     }
40512 
    // Eagerly builds the shared default (empty) instance; the noInit
    // constructor skips field initialization, so initFields() must follow.
    static {
      defaultInstance = new GetTableDescriptorsRequest(true);
      defaultInstance.initFields();
    }
40517 
40518     // @@protoc_insertion_point(class_scope:GetTableDescriptorsRequest)
40519   }
40520 
  // Read-only view shared by GetTableDescriptorsResponse and its Builder,
  // exposing the repeated table_schema field.
  public interface GetTableDescriptorsResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .TableSchema table_schema = 1;
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>
        getTableSchemaList();
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index);
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    int getTableSchemaCount();
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
        getTableSchemaOrBuilderList();
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
        int index);
  }
40549   /**
40550    * Protobuf type {@code GetTableDescriptorsResponse}
40551    */
40552   public static final class GetTableDescriptorsResponse extends
40553       com.google.protobuf.GeneratedMessage
40554       implements GetTableDescriptorsResponseOrBuilder {
    // Use GetTableDescriptorsResponse.newBuilder() to construct.
    // Builder-based constructor: adopts the builder's unknown fields.
    private GetTableDescriptorsResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the shared default instance;
    // callers must invoke initFields() afterwards.
    private GetTableDescriptorsResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
40561 
    // Singleton empty message, assigned in the static initializer below.
    private static final GetTableDescriptorsResponse defaultInstance;
    public static GetTableDescriptorsResponse getDefaultInstance() {
      return defaultInstance;
    }

    // Instance-level accessor required by the Message contract.
    public GetTableDescriptorsResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
40570 
    // Fields from the wire that this generated class does not recognize;
    // preserved so they round-trip on re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor: reads tags from the stream until EOF (tag 0),
    // accumulating repeated table_schema entries (tag 10 = field 1, wire type
    // 2) and routing anything else to the unknown-field set. Note the
    // generated switch places `default:` before `case 10:`; case order in a
    // Java switch has no semantic effect.
    private GetTableDescriptorsResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Lazily allocate the list on the first element; the mutable
              // bit records that we own a mutable copy.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>();
                mutable_bitField0_ |= 0x00000001;
              }
              tableSchema_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, freeze whatever was parsed so the partially
        // built message attached to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the proto descriptor for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor;
    }

    // Maps descriptor fields to the generated accessors via reflection.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.Builder.class);
    }
40634 
    // Stateless parser delegating to the wire-parsing constructor; shared
    // by all parseFrom overloads below.
    public static com.google.protobuf.Parser<GetTableDescriptorsResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetTableDescriptorsResponse>() {
      public GetTableDescriptorsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTableDescriptorsResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTableDescriptorsResponse> getParserForType() {
      return PARSER;
    }
40649 
    // repeated .TableSchema table_schema = 1;
    public static final int TABLE_SCHEMA_FIELD_NUMBER = 1;
    // Immutable after construction (wrapped unmodifiable or emptyList).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_;
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() {
      return tableSchema_;
    }
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
        getTableSchemaOrBuilderList() {
      return tableSchema_;
    }
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    public int getTableSchemaCount() {
      return tableSchema_.size();
    }
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) {
      return tableSchema_.get(index);
    }
    /**
     * <code>repeated .TableSchema table_schema = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
        int index) {
      return tableSchema_.get(index);
    }
40685 
    // Resets all fields to their proto defaults; called by constructors.
    private void initFields() {
      tableSchema_ = java.util.Collections.emptyList();
    }
40689     private byte memoizedIsInitialized = -1;
isInitialized()40690     public final boolean isInitialized() {
40691       byte isInitialized = memoizedIsInitialized;
40692       if (isInitialized != -1) return isInitialized == 1;
40693 
40694       for (int i = 0; i < getTableSchemaCount(); i++) {
40695         if (!getTableSchema(i).isInitialized()) {
40696           memoizedIsInitialized = 0;
40697           return false;
40698         }
40699       }
40700       memoizedIsInitialized = 1;
40701       return true;
40702     }
40703 
    // Serializes this message to the wire. getSerializedSize() is invoked
    // first so the memoized size is populated before writing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < tableSchema_.size(); i++) {
        output.writeMessage(1, tableSchema_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
40712 
40713     private int memoizedSerializedSize = -1;
getSerializedSize()40714     public int getSerializedSize() {
40715       int size = memoizedSerializedSize;
40716       if (size != -1) return size;
40717 
40718       size = 0;
40719       for (int i = 0; i < tableSchema_.size(); i++) {
40720         size += com.google.protobuf.CodedOutputStream
40721           .computeMessageSize(1, tableSchema_.get(i));
40722       }
40723       size += getUnknownFields().getSerializedSize();
40724       memoizedSerializedSize = size;
40725       return size;
40726     }
40727 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
40734 
40735     @java.lang.Override
equals(final java.lang.Object obj)40736     public boolean equals(final java.lang.Object obj) {
40737       if (obj == this) {
40738        return true;
40739       }
40740       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)) {
40741         return super.equals(obj);
40742       }
40743       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) obj;
40744 
40745       boolean result = true;
40746       result = result && getTableSchemaList()
40747           .equals(other.getTableSchemaList());
40748       result = result &&
40749           getUnknownFields().equals(other.getUnknownFields());
40750       return result;
40751     }
40752 
    // Cached hash; 0 doubles as the "not computed" sentinel, so a message
    // whose true hash is 0 is simply recomputed each call.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableSchemaCount() > 0) {
        hash = (37 * hash) + TABLE_SCHEMA_FIELD_NUMBER;
        hash = (53 * hash) + getTableSchemaList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
40769 
    // Convenience parse entry points; all delegate to the shared PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
40822 
    // Builder factories: fresh, typed, prototype-seeded, and from-this.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
40829 
40830     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)40831     protected Builder newBuilderForType(
40832         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
40833       Builder builder = new Builder(parent);
40834       return builder;
40835     }
40836     /**
40837      * Protobuf type {@code GetTableDescriptorsResponse}
40838      */
40839     public static final class Builder extends
40840         com.google.protobuf.GeneratedMessage.Builder<Builder>
40841        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponseOrBuilder {
      // Descriptor for GetTableDescriptorsResponse (shared with the message).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor;
      }

      // Reflection table mapping descriptor fields to generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.Builder.class);
      }
40853 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-attached variant used by newBuilderForType(parent).
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime is configured
      // to always use them (alwaysUseFieldBuilders).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableSchemaFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
40872 
      // Resets the builder to the empty state, clearing either the plain
      // list or the nested RepeatedFieldBuilder, whichever is active.
      public Builder clear() {
        super.clear();
        if (tableSchemaBuilder_ == null) {
          tableSchema_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          tableSchemaBuilder_.clear();
        }
        return this;
      }

      // Deep copy via a round-trip through buildPartial().
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
40887 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableDescriptorsResponse_descriptor;
      }

      // Default (empty) instance of the message type this builder builds.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance();
      }
40896 
      // Builds and validates; throws UninitializedMessageException if any
      // nested TableSchema is missing required fields.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without validation. If the plain list is in use, ownership
      // of the (now unmodifiable) list transfers to the message and the
      // mutable bit is cleared so later builder edits copy-on-write.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse(this);
        int from_bitField0_ = bitField0_;
        if (tableSchemaBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            tableSchema_ = java.util.Collections.unmodifiableList(tableSchema_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.tableSchema_ = tableSchema_;
        } else {
          result.tableSchema_ = tableSchemaBuilder_.build();
        }
        onBuilt();
        return result;
      }
40920 
      // Generic merge: dispatches to the typed overload when possible,
      // otherwise falls back to reflection-based merging.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: appends other's table_schema entries. When this
      // builder's list is empty it aliases other's immutable list directly
      // (copy-on-write via ensureTableSchemaIsMutable); with an active
      // field builder it may rebuild the builder around the alias instead.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance()) return this;
        if (tableSchemaBuilder_ == null) {
          if (!other.tableSchema_.isEmpty()) {
            if (tableSchema_.isEmpty()) {
              tableSchema_ = other.tableSchema_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureTableSchemaIsMutable();
              tableSchema_.addAll(other.tableSchema_);
            }
            onChanged();
          }
        } else {
          if (!other.tableSchema_.isEmpty()) {
            if (tableSchemaBuilder_.isEmpty()) {
              tableSchemaBuilder_.dispose();
              tableSchemaBuilder_ = null;
              tableSchema_ = other.tableSchema_;
              bitField0_ = (bitField0_ & ~0x00000001);
              tableSchemaBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getTableSchemaFieldBuilder() : null;
            } else {
              tableSchemaBuilder_.addAllMessages(other.tableSchema_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
40961 
isInitialized()40962       public final boolean isInitialized() {
40963         for (int i = 0; i < getTableSchemaCount(); i++) {
40964           if (!getTableSchema(i).isInitialized()) {
40965 
40966             return false;
40967           }
40968         }
40969         return true;
40970       }
40971 
      // Parses a message from the stream and merges it into this builder.
      // On parse failure, merges whatever partial message the exception
      // carries (in the finally block) before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence/ownership bits; bit 0x1 marks tableSchema_ as a private
      // mutable copy owned by this builder.
      private int bitField0_;

      // repeated .TableSchema table_schema = 1;
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> tableSchema_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replaces a shared/immutable list with a private
      // ArrayList copy before the first in-place mutation.
      private void ensureTableSchemaIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableSchema_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema>(tableSchema_);
          bitField0_ |= 0x00000001;
         }
      }
41000 
      // When non-null, this nested builder owns the repeated field and the
      // plain tableSchema_ list is ignored; all accessors branch on it.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableSchemaBuilder_;

      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> getTableSchemaList() {
        if (tableSchemaBuilder_ == null) {
          return java.util.Collections.unmodifiableList(tableSchema_);
        } else {
          return tableSchemaBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public int getTableSchemaCount() {
        if (tableSchemaBuilder_ == null) {
          return tableSchema_.size();
        } else {
          return tableSchemaBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTableSchema(int index) {
        if (tableSchemaBuilder_ == null) {
          return tableSchema_.get(index);
        } else {
          return tableSchemaBuilder_.getMessage(index);
        }
      }
41034       /**
41035        * <code>repeated .TableSchema table_schema = 1;</code>
41036        */
      // Mutators below follow one pattern: with no field builder active,
      // null-check (for message values), force a mutable list, mutate, and
      // fire onChanged(); otherwise delegate to the RepeatedFieldBuilder.
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public Builder setTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableSchemaIsMutable();
          tableSchema_.set(index, value);
          onChanged();
        } else {
          tableSchemaBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public Builder setTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.set(index, builderForValue.build());
          onChanged();
        } else {
          tableSchemaBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public Builder addTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableSchemaIsMutable();
          tableSchema_.add(value);
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public Builder addTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
        if (tableSchemaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTableSchemaIsMutable();
          tableSchema_.add(index, value);
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public Builder addTableSchema(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.add(builderForValue.build());
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .TableSchema table_schema = 1;</code>
       */
      public Builder addTableSchema(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
        if (tableSchemaBuilder_ == null) {
          ensureTableSchemaIsMutable();
          tableSchema_.add(index, builderForValue.build());
          onChanged();
        } else {
          tableSchemaBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
41126       /**
41127        * <code>repeated .TableSchema table_schema = 1;</code>
41128        */
addAllTableSchema( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> values)41129       public Builder addAllTableSchema(
41130           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema> values) {
41131         if (tableSchemaBuilder_ == null) {
41132           ensureTableSchemaIsMutable();
41133           super.addAll(values, tableSchema_);
41134           onChanged();
41135         } else {
41136           tableSchemaBuilder_.addAllMessages(values);
41137         }
41138         return this;
41139       }
41140       /**
41141        * <code>repeated .TableSchema table_schema = 1;</code>
41142        */
clearTableSchema()41143       public Builder clearTableSchema() {
41144         if (tableSchemaBuilder_ == null) {
41145           tableSchema_ = java.util.Collections.emptyList();
41146           bitField0_ = (bitField0_ & ~0x00000001);
41147           onChanged();
41148         } else {
41149           tableSchemaBuilder_.clear();
41150         }
41151         return this;
41152       }
41153       /**
41154        * <code>repeated .TableSchema table_schema = 1;</code>
41155        */
removeTableSchema(int index)41156       public Builder removeTableSchema(int index) {
41157         if (tableSchemaBuilder_ == null) {
41158           ensureTableSchemaIsMutable();
41159           tableSchema_.remove(index);
41160           onChanged();
41161         } else {
41162           tableSchemaBuilder_.remove(index);
41163         }
41164         return this;
41165       }
41166       /**
41167        * <code>repeated .TableSchema table_schema = 1;</code>
41168        */
getTableSchemaBuilder( int index)41169       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableSchemaBuilder(
41170           int index) {
41171         return getTableSchemaFieldBuilder().getBuilder(index);
41172       }
41173       /**
41174        * <code>repeated .TableSchema table_schema = 1;</code>
41175        */
getTableSchemaOrBuilder( int index)41176       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableSchemaOrBuilder(
41177           int index) {
41178         if (tableSchemaBuilder_ == null) {
41179           return tableSchema_.get(index);  } else {
41180           return tableSchemaBuilder_.getMessageOrBuilder(index);
41181         }
41182       }
41183       /**
41184        * <code>repeated .TableSchema table_schema = 1;</code>
41185        */
41186       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
getTableSchemaOrBuilderList()41187            getTableSchemaOrBuilderList() {
41188         if (tableSchemaBuilder_ != null) {
41189           return tableSchemaBuilder_.getMessageOrBuilderList();
41190         } else {
41191           return java.util.Collections.unmodifiableList(tableSchema_);
41192         }
41193       }
41194       /**
41195        * <code>repeated .TableSchema table_schema = 1;</code>
41196        */
addTableSchemaBuilder()41197       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder() {
41198         return getTableSchemaFieldBuilder().addBuilder(
41199             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance());
41200       }
41201       /**
41202        * <code>repeated .TableSchema table_schema = 1;</code>
41203        */
addTableSchemaBuilder( int index)41204       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder addTableSchemaBuilder(
41205           int index) {
41206         return getTableSchemaFieldBuilder().addBuilder(
41207             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance());
41208       }
41209       /**
41210        * <code>repeated .TableSchema table_schema = 1;</code>
41211        */
41212       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder>
getTableSchemaBuilderList()41213            getTableSchemaBuilderList() {
41214         return getTableSchemaFieldBuilder().getBuilderList();
41215       }
41216       private com.google.protobuf.RepeatedFieldBuilder<
41217           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
getTableSchemaFieldBuilder()41218           getTableSchemaFieldBuilder() {
41219         if (tableSchemaBuilder_ == null) {
41220           tableSchemaBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
41221               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
41222                   tableSchema_,
41223                   ((bitField0_ & 0x00000001) == 0x00000001),
41224                   getParentForChildren(),
41225                   isClean());
41226           tableSchema_ = null;
41227         }
41228         return tableSchemaBuilder_;
41229       }
41230 
41231       // @@protoc_insertion_point(builder_scope:GetTableDescriptorsResponse)
41232     }
41233 
    // Eagerly builds the shared singleton default instance for
    // GetTableDescriptorsResponse when the class is loaded; initFields()
    // sets every field to its proto-declared default.
    static {
      defaultInstance = new GetTableDescriptorsResponse(true);
      defaultInstance.initFields();
    }
41238 
41239     // @@protoc_insertion_point(class_scope:GetTableDescriptorsResponse)
41240   }
41241 
  /**
   * Accessor contract for {@code GetTableNamesRequest}: one has/get pair per
   * proto field ({@code regex}, {@code include_sys_tables},
   * {@code namespace}), implemented by both the immutable message and its
   * Builder.
   */
  public interface GetTableNamesRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string regex = 1;
    /**
     * <code>optional string regex = 1;</code>
     */
    boolean hasRegex();
    /**
     * <code>optional string regex = 1;</code>
     */
    java.lang.String getRegex();
    /**
     * <code>optional string regex = 1;</code>
     */
    com.google.protobuf.ByteString
        getRegexBytes();

    // optional bool include_sys_tables = 2 [default = false];
    /**
     * <code>optional bool include_sys_tables = 2 [default = false];</code>
     */
    boolean hasIncludeSysTables();
    /**
     * <code>optional bool include_sys_tables = 2 [default = false];</code>
     */
    boolean getIncludeSysTables();

    // optional string namespace = 3;
    /**
     * <code>optional string namespace = 3;</code>
     */
    boolean hasNamespace();
    /**
     * <code>optional string namespace = 3;</code>
     */
    java.lang.String getNamespace();
    /**
     * <code>optional string namespace = 3;</code>
     */
    com.google.protobuf.ByteString
        getNamespaceBytes();
  }
41285   /**
41286    * Protobuf type {@code GetTableNamesRequest}
41287    */
41288   public static final class GetTableNamesRequest extends
41289       com.google.protobuf.GeneratedMessage
41290       implements GetTableNamesRequestOrBuilder {
    // Use GetTableNamesRequest.newBuilder() to construct.
    private GetTableNamesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the singleton default instance;
    // skips copying from a builder and carries no unknown fields.
    private GetTableNamesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetTableNamesRequest defaultInstance;
    public static GetTableNamesRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetTableNamesRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields the parser did not recognize, preserved for reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetTableNamesRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41313     private GetTableNamesRequest(
41314         com.google.protobuf.CodedInputStream input,
41315         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41316         throws com.google.protobuf.InvalidProtocolBufferException {
41317       initFields();
41318       int mutable_bitField0_ = 0;
41319       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
41320           com.google.protobuf.UnknownFieldSet.newBuilder();
41321       try {
41322         boolean done = false;
41323         while (!done) {
41324           int tag = input.readTag();
41325           switch (tag) {
41326             case 0:
41327               done = true;
41328               break;
41329             default: {
41330               if (!parseUnknownField(input, unknownFields,
41331                                      extensionRegistry, tag)) {
41332                 done = true;
41333               }
41334               break;
41335             }
41336             case 10: {
41337               bitField0_ |= 0x00000001;
41338               regex_ = input.readBytes();
41339               break;
41340             }
41341             case 16: {
41342               bitField0_ |= 0x00000002;
41343               includeSysTables_ = input.readBool();
41344               break;
41345             }
41346             case 26: {
41347               bitField0_ |= 0x00000004;
41348               namespace_ = input.readBytes();
41349               break;
41350             }
41351           }
41352         }
41353       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
41354         throw e.setUnfinishedMessage(this);
41355       } catch (java.io.IOException e) {
41356         throw new com.google.protobuf.InvalidProtocolBufferException(
41357             e.getMessage()).setUnfinishedMessage(this);
41358       } finally {
41359         this.unknownFields = unknownFields.build();
41360         makeExtensionsImmutable();
41361       }
41362     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.Builder.class);
    }

    // Stateless parser that delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<GetTableNamesRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetTableNamesRequest>() {
      public GetTableNamesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTableNamesRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTableNamesRequest> getParserForType() {
      return PARSER;
    }
41389 
    // Presence bits: 0x1 = regex, 0x2 = include_sys_tables, 0x4 = namespace.
    private int bitField0_;
    // optional string regex = 1;
    public static final int REGEX_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString (lazy UTF-8 decode).
    private java.lang.Object regex_;
    /**
     * <code>optional string regex = 1;</code>
     */
    public boolean hasRegex() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string regex = 1;</code>
     */
    public java.lang.String getRegex() {
      java.lang.Object ref = regex_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so
        // invalid bytes still round-trip unchanged through getRegexBytes().
        if (bs.isValidUtf8()) {
          regex_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string regex = 1;</code>
     */
    public com.google.protobuf.ByteString
        getRegexBytes() {
      java.lang.Object ref = regex_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        regex_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional bool include_sys_tables = 2 [default = false];
    public static final int INCLUDE_SYS_TABLES_FIELD_NUMBER = 2;
    private boolean includeSysTables_;
    /**
     * <code>optional bool include_sys_tables = 2 [default = false];</code>
     */
    public boolean hasIncludeSysTables() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool include_sys_tables = 2 [default = false];</code>
     */
    public boolean getIncludeSysTables() {
      return includeSysTables_;
    }

    // optional string namespace = 3;
    public static final int NAMESPACE_FIELD_NUMBER = 3;
    // Same String-or-ByteString lazy-decode scheme as regex_.
    private java.lang.Object namespace_;
    /**
     * <code>optional string namespace = 3;</code>
     */
    public boolean hasNamespace() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string namespace = 3;</code>
     */
    public java.lang.String getNamespace() {
      java.lang.Object ref = namespace_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          namespace_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string namespace = 3;</code>
     */
    public com.google.protobuf.ByteString
        getNamespaceBytes() {
      java.lang.Object ref = namespace_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        namespace_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // Resets every field to its proto-declared default value.
    private void initFields() {
      regex_ = "";
      includeSysTables_ = false;
      namespace_ = "";
    }
    // -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Every field is optional, so any instance is initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes only the fields whose presence bit is set, then the
    // preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getRegexBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, includeSysTables_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getNamespaceBytes());
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getRegexBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, includeSysTables_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getNamespaceBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
41544 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field equality: presence bits must match, and each present
    // field's value must match; unknown fields compared last.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest) obj;

      boolean result = true;
      result = result && (hasRegex() == other.hasRegex());
      if (hasRegex()) {
        result = result && getRegex()
            .equals(other.getRegex());
      }
      result = result && (hasIncludeSysTables() == other.hasIncludeSysTables());
      if (hasIncludeSysTables()) {
        result = result && (getIncludeSysTables()
            == other.getIncludeSysTables());
      }
      result = result && (hasNamespace() == other.hasNamespace());
      if (hasNamespace()) {
        result = result && getNamespace()
            .equals(other.getNamespace());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed" (recomputed if the real hash
    // happens to be 0, which is harmless).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegex()) {
        hash = (37 * hash) + REGEX_FIELD_NUMBER;
        hash = (53 * hash) + getRegex().hashCode();
      }
      if (hasIncludeSysTables()) {
        hash = (37 * hash) + INCLUDE_SYS_TABLES_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIncludeSysTables());
      }
      if (hasNamespace()) {
        hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
        hash = (53 * hash) + getNamespace().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
41607 
parseFrom( com.google.protobuf.ByteString data)41608     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(
41609         com.google.protobuf.ByteString data)
41610         throws com.google.protobuf.InvalidProtocolBufferException {
41611       return PARSER.parseFrom(data);
41612     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41613     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(
41614         com.google.protobuf.ByteString data,
41615         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41616         throws com.google.protobuf.InvalidProtocolBufferException {
41617       return PARSER.parseFrom(data, extensionRegistry);
41618     }
parseFrom(byte[] data)41619     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(byte[] data)
41620         throws com.google.protobuf.InvalidProtocolBufferException {
41621       return PARSER.parseFrom(data);
41622     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41623     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(
41624         byte[] data,
41625         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41626         throws com.google.protobuf.InvalidProtocolBufferException {
41627       return PARSER.parseFrom(data, extensionRegistry);
41628     }
parseFrom(java.io.InputStream input)41629     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(java.io.InputStream input)
41630         throws java.io.IOException {
41631       return PARSER.parseFrom(input);
41632     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41633     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(
41634         java.io.InputStream input,
41635         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41636         throws java.io.IOException {
41637       return PARSER.parseFrom(input, extensionRegistry);
41638     }
parseDelimitedFrom(java.io.InputStream input)41639     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseDelimitedFrom(java.io.InputStream input)
41640         throws java.io.IOException {
41641       return PARSER.parseDelimitedFrom(input);
41642     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41643     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseDelimitedFrom(
41644         java.io.InputStream input,
41645         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41646         throws java.io.IOException {
41647       return PARSER.parseDelimitedFrom(input, extensionRegistry);
41648     }
parseFrom( com.google.protobuf.CodedInputStream input)41649     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(
41650         com.google.protobuf.CodedInputStream input)
41651         throws java.io.IOException {
41652       return PARSER.parseFrom(input);
41653     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41654     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parseFrom(
41655         com.google.protobuf.CodedInputStream input,
41656         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41657         throws java.io.IOException {
41658       return PARSER.parseFrom(input, extensionRegistry);
41659     }
41660 
newBuilder()41661     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()41662     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest prototype)41663     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest prototype) {
41664       return newBuilder().mergeFrom(prototype);
41665     }
toBuilder()41666     public Builder toBuilder() { return newBuilder(this); }
41667 
41668     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)41669     protected Builder newBuilderForType(
41670         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
41671       Builder builder = new Builder(parent);
41672       return builder;
41673     }
41674     /**
41675      * Protobuf type {@code GetTableNamesRequest}
41676      */
41677     public static final class Builder extends
41678         com.google.protobuf.GeneratedMessage.Builder<Builder>
41679        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so there are no sub-builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its default and clears all presence bits.
      public Builder clear() {
        super.clear();
        regex_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        includeSysTables_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        namespace_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance();
      }

      // build() enforces required-field initialization; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder's fields and presence bits into a new immutable
      // message without checking initialization.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.regex_ = regex_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.includeSysTables_ = includeSysTables_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.namespace_ = namespace_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
41762 
mergeFrom(com.google.protobuf.Message other)41763       public Builder mergeFrom(com.google.protobuf.Message other) {
41764         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest) {
41765           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)other);
41766         } else {
41767           super.mergeFrom(other);
41768           return this;
41769         }
41770       }
41771 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest other)41772       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest other) {
41773         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance()) return this;
41774         if (other.hasRegex()) {
41775           bitField0_ |= 0x00000001;
41776           regex_ = other.regex_;
41777           onChanged();
41778         }
41779         if (other.hasIncludeSysTables()) {
41780           setIncludeSysTables(other.getIncludeSysTables());
41781         }
41782         if (other.hasNamespace()) {
41783           bitField0_ |= 0x00000004;
41784           namespace_ = other.namespace_;
41785           onChanged();
41786         }
41787         this.mergeUnknownFields(other.getUnknownFields());
41788         return this;
41789       }
41790 
isInitialized()41791       public final boolean isInitialized() {
41792         return true;
41793       }
41794 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)41795       public Builder mergeFrom(
41796           com.google.protobuf.CodedInputStream input,
41797           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
41798           throws java.io.IOException {
41799         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest parsedMessage = null;
41800         try {
41801           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
41802         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
41803           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest) e.getUnfinishedMessage();
41804           throw e;
41805         } finally {
41806           if (parsedMessage != null) {
41807             mergeFrom(parsedMessage);
41808           }
41809         }
41810         return this;
41811       }
41812       private int bitField0_;
41813 
41814       // optional string regex = 1;
41815       private java.lang.Object regex_ = "";
41816       /**
41817        * <code>optional string regex = 1;</code>
41818        */
hasRegex()41819       public boolean hasRegex() {
41820         return ((bitField0_ & 0x00000001) == 0x00000001);
41821       }
41822       /**
41823        * <code>optional string regex = 1;</code>
41824        */
getRegex()41825       public java.lang.String getRegex() {
41826         java.lang.Object ref = regex_;
41827         if (!(ref instanceof java.lang.String)) {
41828           java.lang.String s = ((com.google.protobuf.ByteString) ref)
41829               .toStringUtf8();
41830           regex_ = s;
41831           return s;
41832         } else {
41833           return (java.lang.String) ref;
41834         }
41835       }
41836       /**
41837        * <code>optional string regex = 1;</code>
41838        */
41839       public com.google.protobuf.ByteString
getRegexBytes()41840           getRegexBytes() {
41841         java.lang.Object ref = regex_;
41842         if (ref instanceof String) {
41843           com.google.protobuf.ByteString b =
41844               com.google.protobuf.ByteString.copyFromUtf8(
41845                   (java.lang.String) ref);
41846           regex_ = b;
41847           return b;
41848         } else {
41849           return (com.google.protobuf.ByteString) ref;
41850         }
41851       }
41852       /**
41853        * <code>optional string regex = 1;</code>
41854        */
setRegex( java.lang.String value)41855       public Builder setRegex(
41856           java.lang.String value) {
41857         if (value == null) {
41858     throw new NullPointerException();
41859   }
41860   bitField0_ |= 0x00000001;
41861         regex_ = value;
41862         onChanged();
41863         return this;
41864       }
41865       /**
41866        * <code>optional string regex = 1;</code>
41867        */
clearRegex()41868       public Builder clearRegex() {
41869         bitField0_ = (bitField0_ & ~0x00000001);
41870         regex_ = getDefaultInstance().getRegex();
41871         onChanged();
41872         return this;
41873       }
41874       /**
41875        * <code>optional string regex = 1;</code>
41876        */
setRegexBytes( com.google.protobuf.ByteString value)41877       public Builder setRegexBytes(
41878           com.google.protobuf.ByteString value) {
41879         if (value == null) {
41880     throw new NullPointerException();
41881   }
41882   bitField0_ |= 0x00000001;
41883         regex_ = value;
41884         onChanged();
41885         return this;
41886       }
41887 
41888       // optional bool include_sys_tables = 2 [default = false];
41889       private boolean includeSysTables_ ;
41890       /**
41891        * <code>optional bool include_sys_tables = 2 [default = false];</code>
41892        */
hasIncludeSysTables()41893       public boolean hasIncludeSysTables() {
41894         return ((bitField0_ & 0x00000002) == 0x00000002);
41895       }
41896       /**
41897        * <code>optional bool include_sys_tables = 2 [default = false];</code>
41898        */
getIncludeSysTables()41899       public boolean getIncludeSysTables() {
41900         return includeSysTables_;
41901       }
41902       /**
41903        * <code>optional bool include_sys_tables = 2 [default = false];</code>
41904        */
setIncludeSysTables(boolean value)41905       public Builder setIncludeSysTables(boolean value) {
41906         bitField0_ |= 0x00000002;
41907         includeSysTables_ = value;
41908         onChanged();
41909         return this;
41910       }
41911       /**
41912        * <code>optional bool include_sys_tables = 2 [default = false];</code>
41913        */
clearIncludeSysTables()41914       public Builder clearIncludeSysTables() {
41915         bitField0_ = (bitField0_ & ~0x00000002);
41916         includeSysTables_ = false;
41917         onChanged();
41918         return this;
41919       }
41920 
41921       // optional string namespace = 3;
41922       private java.lang.Object namespace_ = "";
41923       /**
41924        * <code>optional string namespace = 3;</code>
41925        */
hasNamespace()41926       public boolean hasNamespace() {
41927         return ((bitField0_ & 0x00000004) == 0x00000004);
41928       }
41929       /**
41930        * <code>optional string namespace = 3;</code>
41931        */
getNamespace()41932       public java.lang.String getNamespace() {
41933         java.lang.Object ref = namespace_;
41934         if (!(ref instanceof java.lang.String)) {
41935           java.lang.String s = ((com.google.protobuf.ByteString) ref)
41936               .toStringUtf8();
41937           namespace_ = s;
41938           return s;
41939         } else {
41940           return (java.lang.String) ref;
41941         }
41942       }
41943       /**
41944        * <code>optional string namespace = 3;</code>
41945        */
41946       public com.google.protobuf.ByteString
getNamespaceBytes()41947           getNamespaceBytes() {
41948         java.lang.Object ref = namespace_;
41949         if (ref instanceof String) {
41950           com.google.protobuf.ByteString b =
41951               com.google.protobuf.ByteString.copyFromUtf8(
41952                   (java.lang.String) ref);
41953           namespace_ = b;
41954           return b;
41955         } else {
41956           return (com.google.protobuf.ByteString) ref;
41957         }
41958       }
41959       /**
41960        * <code>optional string namespace = 3;</code>
41961        */
setNamespace( java.lang.String value)41962       public Builder setNamespace(
41963           java.lang.String value) {
41964         if (value == null) {
41965     throw new NullPointerException();
41966   }
41967   bitField0_ |= 0x00000004;
41968         namespace_ = value;
41969         onChanged();
41970         return this;
41971       }
41972       /**
41973        * <code>optional string namespace = 3;</code>
41974        */
clearNamespace()41975       public Builder clearNamespace() {
41976         bitField0_ = (bitField0_ & ~0x00000004);
41977         namespace_ = getDefaultInstance().getNamespace();
41978         onChanged();
41979         return this;
41980       }
41981       /**
41982        * <code>optional string namespace = 3;</code>
41983        */
setNamespaceBytes( com.google.protobuf.ByteString value)41984       public Builder setNamespaceBytes(
41985           com.google.protobuf.ByteString value) {
41986         if (value == null) {
41987     throw new NullPointerException();
41988   }
41989   bitField0_ |= 0x00000004;
41990         namespace_ = value;
41991         onChanged();
41992         return this;
41993       }
41994 
41995       // @@protoc_insertion_point(builder_scope:GetTableNamesRequest)
41996     }
41997 
41998     static {
41999       defaultInstance = new GetTableNamesRequest(true);
defaultInstance.initFields()42000       defaultInstance.initFields();
42001     }
42002 
42003     // @@protoc_insertion_point(class_scope:GetTableNamesRequest)
42004   }
42005 
42006   public interface GetTableNamesResponseOrBuilder
42007       extends com.google.protobuf.MessageOrBuilder {
42008 
42009     // repeated .TableName table_names = 1;
42010     /**
42011      * <code>repeated .TableName table_names = 1;</code>
42012      */
42013     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>
getTableNamesList()42014         getTableNamesList();
42015     /**
42016      * <code>repeated .TableName table_names = 1;</code>
42017      */
getTableNames(int index)42018     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index);
42019     /**
42020      * <code>repeated .TableName table_names = 1;</code>
42021      */
getTableNamesCount()42022     int getTableNamesCount();
42023     /**
42024      * <code>repeated .TableName table_names = 1;</code>
42025      */
42026     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNamesOrBuilderList()42027         getTableNamesOrBuilderList();
42028     /**
42029      * <code>repeated .TableName table_names = 1;</code>
42030      */
getTableNamesOrBuilder( int index)42031     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder(
42032         int index);
42033   }
42034   /**
42035    * Protobuf type {@code GetTableNamesResponse}
42036    */
42037   public static final class GetTableNamesResponse extends
42038       com.google.protobuf.GeneratedMessage
42039       implements GetTableNamesResponseOrBuilder {
42040     // Use GetTableNamesResponse.newBuilder() to construct.
GetTableNamesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)42041     private GetTableNamesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
42042       super(builder);
42043       this.unknownFields = builder.getUnknownFields();
42044     }
GetTableNamesResponse(boolean noInit)42045     private GetTableNamesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
42046 
42047     private static final GetTableNamesResponse defaultInstance;
getDefaultInstance()42048     public static GetTableNamesResponse getDefaultInstance() {
42049       return defaultInstance;
42050     }
42051 
getDefaultInstanceForType()42052     public GetTableNamesResponse getDefaultInstanceForType() {
42053       return defaultInstance;
42054     }
42055 
42056     private final com.google.protobuf.UnknownFieldSet unknownFields;
42057     @java.lang.Override
42058     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()42059         getUnknownFields() {
42060       return this.unknownFields;
42061     }
GetTableNamesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42062     private GetTableNamesResponse(
42063         com.google.protobuf.CodedInputStream input,
42064         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42065         throws com.google.protobuf.InvalidProtocolBufferException {
42066       initFields();
42067       int mutable_bitField0_ = 0;
42068       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
42069           com.google.protobuf.UnknownFieldSet.newBuilder();
42070       try {
42071         boolean done = false;
42072         while (!done) {
42073           int tag = input.readTag();
42074           switch (tag) {
42075             case 0:
42076               done = true;
42077               break;
42078             default: {
42079               if (!parseUnknownField(input, unknownFields,
42080                                      extensionRegistry, tag)) {
42081                 done = true;
42082               }
42083               break;
42084             }
42085             case 10: {
42086               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
42087                 tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>();
42088                 mutable_bitField0_ |= 0x00000001;
42089               }
42090               tableNames_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry));
42091               break;
42092             }
42093           }
42094         }
42095       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
42096         throw e.setUnfinishedMessage(this);
42097       } catch (java.io.IOException e) {
42098         throw new com.google.protobuf.InvalidProtocolBufferException(
42099             e.getMessage()).setUnfinishedMessage(this);
42100       } finally {
42101         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
42102           tableNames_ = java.util.Collections.unmodifiableList(tableNames_);
42103         }
42104         this.unknownFields = unknownFields.build();
42105         makeExtensionsImmutable();
42106       }
42107     }
42108     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()42109         getDescriptor() {
42110       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_descriptor;
42111     }
42112 
42113     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()42114         internalGetFieldAccessorTable() {
42115       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_fieldAccessorTable
42116           .ensureFieldAccessorsInitialized(
42117               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.Builder.class);
42118     }
42119 
42120     public static com.google.protobuf.Parser<GetTableNamesResponse> PARSER =
42121         new com.google.protobuf.AbstractParser<GetTableNamesResponse>() {
42122       public GetTableNamesResponse parsePartialFrom(
42123           com.google.protobuf.CodedInputStream input,
42124           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42125           throws com.google.protobuf.InvalidProtocolBufferException {
42126         return new GetTableNamesResponse(input, extensionRegistry);
42127       }
42128     };
42129 
42130     @java.lang.Override
getParserForType()42131     public com.google.protobuf.Parser<GetTableNamesResponse> getParserForType() {
42132       return PARSER;
42133     }
42134 
42135     // repeated .TableName table_names = 1;
42136     public static final int TABLE_NAMES_FIELD_NUMBER = 1;
42137     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_;
42138     /**
42139      * <code>repeated .TableName table_names = 1;</code>
42140      */
getTableNamesList()42141     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() {
42142       return tableNames_;
42143     }
42144     /**
42145      * <code>repeated .TableName table_names = 1;</code>
42146      */
42147     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNamesOrBuilderList()42148         getTableNamesOrBuilderList() {
42149       return tableNames_;
42150     }
42151     /**
42152      * <code>repeated .TableName table_names = 1;</code>
42153      */
getTableNamesCount()42154     public int getTableNamesCount() {
42155       return tableNames_.size();
42156     }
42157     /**
42158      * <code>repeated .TableName table_names = 1;</code>
42159      */
getTableNames(int index)42160     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) {
42161       return tableNames_.get(index);
42162     }
42163     /**
42164      * <code>repeated .TableName table_names = 1;</code>
42165      */
getTableNamesOrBuilder( int index)42166     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder(
42167         int index) {
42168       return tableNames_.get(index);
42169     }
42170 
initFields()42171     private void initFields() {
42172       tableNames_ = java.util.Collections.emptyList();
42173     }
42174     private byte memoizedIsInitialized = -1;
isInitialized()42175     public final boolean isInitialized() {
42176       byte isInitialized = memoizedIsInitialized;
42177       if (isInitialized != -1) return isInitialized == 1;
42178 
42179       for (int i = 0; i < getTableNamesCount(); i++) {
42180         if (!getTableNames(i).isInitialized()) {
42181           memoizedIsInitialized = 0;
42182           return false;
42183         }
42184       }
42185       memoizedIsInitialized = 1;
42186       return true;
42187     }
42188 
writeTo(com.google.protobuf.CodedOutputStream output)42189     public void writeTo(com.google.protobuf.CodedOutputStream output)
42190                         throws java.io.IOException {
42191       getSerializedSize();
42192       for (int i = 0; i < tableNames_.size(); i++) {
42193         output.writeMessage(1, tableNames_.get(i));
42194       }
42195       getUnknownFields().writeTo(output);
42196     }
42197 
42198     private int memoizedSerializedSize = -1;
getSerializedSize()42199     public int getSerializedSize() {
42200       int size = memoizedSerializedSize;
42201       if (size != -1) return size;
42202 
42203       size = 0;
42204       for (int i = 0; i < tableNames_.size(); i++) {
42205         size += com.google.protobuf.CodedOutputStream
42206           .computeMessageSize(1, tableNames_.get(i));
42207       }
42208       size += getUnknownFields().getSerializedSize();
42209       memoizedSerializedSize = size;
42210       return size;
42211     }
42212 
42213     private static final long serialVersionUID = 0L;
42214     @java.lang.Override
writeReplace()42215     protected java.lang.Object writeReplace()
42216         throws java.io.ObjectStreamException {
42217       return super.writeReplace();
42218     }
42219 
42220     @java.lang.Override
equals(final java.lang.Object obj)42221     public boolean equals(final java.lang.Object obj) {
42222       if (obj == this) {
42223        return true;
42224       }
42225       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse)) {
42226         return super.equals(obj);
42227       }
42228       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) obj;
42229 
42230       boolean result = true;
42231       result = result && getTableNamesList()
42232           .equals(other.getTableNamesList());
42233       result = result &&
42234           getUnknownFields().equals(other.getUnknownFields());
42235       return result;
42236     }
42237 
42238     private int memoizedHashCode = 0;
42239     @java.lang.Override
hashCode()42240     public int hashCode() {
42241       if (memoizedHashCode != 0) {
42242         return memoizedHashCode;
42243       }
42244       int hash = 41;
42245       hash = (19 * hash) + getDescriptorForType().hashCode();
42246       if (getTableNamesCount() > 0) {
42247         hash = (37 * hash) + TABLE_NAMES_FIELD_NUMBER;
42248         hash = (53 * hash) + getTableNamesList().hashCode();
42249       }
42250       hash = (29 * hash) + getUnknownFields().hashCode();
42251       memoizedHashCode = hash;
42252       return hash;
42253     }
42254 
parseFrom( com.google.protobuf.ByteString data)42255     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(
42256         com.google.protobuf.ByteString data)
42257         throws com.google.protobuf.InvalidProtocolBufferException {
42258       return PARSER.parseFrom(data);
42259     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42260     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(
42261         com.google.protobuf.ByteString data,
42262         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42263         throws com.google.protobuf.InvalidProtocolBufferException {
42264       return PARSER.parseFrom(data, extensionRegistry);
42265     }
parseFrom(byte[] data)42266     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(byte[] data)
42267         throws com.google.protobuf.InvalidProtocolBufferException {
42268       return PARSER.parseFrom(data);
42269     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42270     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(
42271         byte[] data,
42272         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42273         throws com.google.protobuf.InvalidProtocolBufferException {
42274       return PARSER.parseFrom(data, extensionRegistry);
42275     }
parseFrom(java.io.InputStream input)42276     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(java.io.InputStream input)
42277         throws java.io.IOException {
42278       return PARSER.parseFrom(input);
42279     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42280     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(
42281         java.io.InputStream input,
42282         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42283         throws java.io.IOException {
42284       return PARSER.parseFrom(input, extensionRegistry);
42285     }
parseDelimitedFrom(java.io.InputStream input)42286     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseDelimitedFrom(java.io.InputStream input)
42287         throws java.io.IOException {
42288       return PARSER.parseDelimitedFrom(input);
42289     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42290     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseDelimitedFrom(
42291         java.io.InputStream input,
42292         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42293         throws java.io.IOException {
42294       return PARSER.parseDelimitedFrom(input, extensionRegistry);
42295     }
parseFrom( com.google.protobuf.CodedInputStream input)42296     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(
42297         com.google.protobuf.CodedInputStream input)
42298         throws java.io.IOException {
42299       return PARSER.parseFrom(input);
42300     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42301     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parseFrom(
42302         com.google.protobuf.CodedInputStream input,
42303         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42304         throws java.io.IOException {
42305       return PARSER.parseFrom(input, extensionRegistry);
42306     }
42307 
newBuilder()42308     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()42309     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse prototype)42310     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse prototype) {
42311       return newBuilder().mergeFrom(prototype);
42312     }
toBuilder()42313     public Builder toBuilder() { return newBuilder(this); }
42314 
42315     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)42316     protected Builder newBuilderForType(
42317         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
42318       Builder builder = new Builder(parent);
42319       return builder;
42320     }
42321     /**
42322      * Protobuf type {@code GetTableNamesResponse}
42323      */
42324     public static final class Builder extends
42325         com.google.protobuf.GeneratedMessage.Builder<Builder>
42326        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponseOrBuilder {
42327       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()42328           getDescriptor() {
42329         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_descriptor;
42330       }
42331 
42332       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()42333           internalGetFieldAccessorTable() {
42334         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_fieldAccessorTable
42335             .ensureFieldAccessorsInitialized(
42336                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.Builder.class);
42337       }
42338 
42339       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.newBuilder()
Builder()42340       private Builder() {
42341         maybeForceBuilderInitialization();
42342       }
42343 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)42344       private Builder(
42345           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
42346         super(parent);
42347         maybeForceBuilderInitialization();
42348       }
maybeForceBuilderInitialization()42349       private void maybeForceBuilderInitialization() {
42350         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
42351           getTableNamesFieldBuilder();
42352         }
42353       }
create()42354       private static Builder create() {
42355         return new Builder();
42356       }
42357 
clear()42358       public Builder clear() {
42359         super.clear();
42360         if (tableNamesBuilder_ == null) {
42361           tableNames_ = java.util.Collections.emptyList();
42362           bitField0_ = (bitField0_ & ~0x00000001);
42363         } else {
42364           tableNamesBuilder_.clear();
42365         }
42366         return this;
42367       }
42368 
clone()42369       public Builder clone() {
42370         return create().mergeFrom(buildPartial());
42371       }
42372 
42373       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()42374           getDescriptorForType() {
42375         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetTableNamesResponse_descriptor;
42376       }
42377 
getDefaultInstanceForType()42378       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse getDefaultInstanceForType() {
42379         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance();
42380       }
42381 
build()42382       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse build() {
42383         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse result = buildPartial();
42384         if (!result.isInitialized()) {
42385           throw newUninitializedMessageException(result);
42386         }
42387         return result;
42388       }
42389 
buildPartial()42390       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse buildPartial() {
42391         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse(this);
42392         int from_bitField0_ = bitField0_;
42393         if (tableNamesBuilder_ == null) {
42394           if (((bitField0_ & 0x00000001) == 0x00000001)) {
42395             tableNames_ = java.util.Collections.unmodifiableList(tableNames_);
42396             bitField0_ = (bitField0_ & ~0x00000001);
42397           }
42398           result.tableNames_ = tableNames_;
42399         } else {
42400           result.tableNames_ = tableNamesBuilder_.build();
42401         }
42402         onBuilt();
42403         return result;
42404       }
42405 
mergeFrom(com.google.protobuf.Message other)42406       public Builder mergeFrom(com.google.protobuf.Message other) {
42407         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) {
42408           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse)other);
42409         } else {
42410           super.mergeFrom(other);
42411           return this;
42412         }
42413       }
42414 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse other)42415       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse other) {
42416         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance()) return this;
42417         if (tableNamesBuilder_ == null) {
42418           if (!other.tableNames_.isEmpty()) {
42419             if (tableNames_.isEmpty()) {
42420               tableNames_ = other.tableNames_;
42421               bitField0_ = (bitField0_ & ~0x00000001);
42422             } else {
42423               ensureTableNamesIsMutable();
42424               tableNames_.addAll(other.tableNames_);
42425             }
42426             onChanged();
42427           }
42428         } else {
42429           if (!other.tableNames_.isEmpty()) {
42430             if (tableNamesBuilder_.isEmpty()) {
42431               tableNamesBuilder_.dispose();
42432               tableNamesBuilder_ = null;
42433               tableNames_ = other.tableNames_;
42434               bitField0_ = (bitField0_ & ~0x00000001);
42435               tableNamesBuilder_ =
42436                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
42437                    getTableNamesFieldBuilder() : null;
42438             } else {
42439               tableNamesBuilder_.addAllMessages(other.tableNames_);
42440             }
42441           }
42442         }
42443         this.mergeUnknownFields(other.getUnknownFields());
42444         return this;
42445       }
42446 
isInitialized()42447       public final boolean isInitialized() {
42448         for (int i = 0; i < getTableNamesCount(); i++) {
42449           if (!getTableNames(i).isInitialized()) {
42450 
42451             return false;
42452           }
42453         }
42454         return true;
42455       }
42456 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42457       public Builder mergeFrom(
42458           com.google.protobuf.CodedInputStream input,
42459           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42460           throws java.io.IOException {
42461         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse parsedMessage = null;
42462         try {
42463           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
42464         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
42465           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) e.getUnfinishedMessage();
42466           throw e;
42467         } finally {
42468           if (parsedMessage != null) {
42469             mergeFrom(parsedMessage);
42470           }
42471         }
42472         return this;
42473       }
      // Presence/ownership bits; bit 0x00000001 set means tableNames_ is a
      // private mutable copy owned by this builder.
      private int bitField0_;

      // repeated .TableName table_names = 1;
      // Backing list for table_names; starts as the shared immutable empty list.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> tableNames_ =
        java.util.Collections.emptyList();
ensureTableNamesIsMutable()42479       private void ensureTableNamesIsMutable() {
42480         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
42481           tableNames_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName>(tableNames_);
42482           bitField0_ |= 0x00000001;
42483          }
42484       }
42485 
      // Lazily-created nested builder for table_names; once non-null it (not
      // tableNames_) owns the field's contents.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNamesBuilder_;
42488 
42489       /**
42490        * <code>repeated .TableName table_names = 1;</code>
42491        */
getTableNamesList()42492       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> getTableNamesList() {
42493         if (tableNamesBuilder_ == null) {
42494           return java.util.Collections.unmodifiableList(tableNames_);
42495         } else {
42496           return tableNamesBuilder_.getMessageList();
42497         }
42498       }
42499       /**
42500        * <code>repeated .TableName table_names = 1;</code>
42501        */
getTableNamesCount()42502       public int getTableNamesCount() {
42503         if (tableNamesBuilder_ == null) {
42504           return tableNames_.size();
42505         } else {
42506           return tableNamesBuilder_.getCount();
42507         }
42508       }
42509       /**
42510        * <code>repeated .TableName table_names = 1;</code>
42511        */
getTableNames(int index)42512       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableNames(int index) {
42513         if (tableNamesBuilder_ == null) {
42514           return tableNames_.get(index);
42515         } else {
42516           return tableNamesBuilder_.getMessage(index);
42517         }
42518       }
42519       /**
42520        * <code>repeated .TableName table_names = 1;</code>
42521        */
setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)42522       public Builder setTableNames(
42523           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
42524         if (tableNamesBuilder_ == null) {
42525           if (value == null) {
42526             throw new NullPointerException();
42527           }
42528           ensureTableNamesIsMutable();
42529           tableNames_.set(index, value);
42530           onChanged();
42531         } else {
42532           tableNamesBuilder_.setMessage(index, value);
42533         }
42534         return this;
42535       }
42536       /**
42537        * <code>repeated .TableName table_names = 1;</code>
42538        */
setTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)42539       public Builder setTableNames(
42540           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
42541         if (tableNamesBuilder_ == null) {
42542           ensureTableNamesIsMutable();
42543           tableNames_.set(index, builderForValue.build());
42544           onChanged();
42545         } else {
42546           tableNamesBuilder_.setMessage(index, builderForValue.build());
42547         }
42548         return this;
42549       }
42550       /**
42551        * <code>repeated .TableName table_names = 1;</code>
42552        */
addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)42553       public Builder addTableNames(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
42554         if (tableNamesBuilder_ == null) {
42555           if (value == null) {
42556             throw new NullPointerException();
42557           }
42558           ensureTableNamesIsMutable();
42559           tableNames_.add(value);
42560           onChanged();
42561         } else {
42562           tableNamesBuilder_.addMessage(value);
42563         }
42564         return this;
42565       }
42566       /**
42567        * <code>repeated .TableName table_names = 1;</code>
42568        */
addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)42569       public Builder addTableNames(
42570           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
42571         if (tableNamesBuilder_ == null) {
42572           if (value == null) {
42573             throw new NullPointerException();
42574           }
42575           ensureTableNamesIsMutable();
42576           tableNames_.add(index, value);
42577           onChanged();
42578         } else {
42579           tableNamesBuilder_.addMessage(index, value);
42580         }
42581         return this;
42582       }
42583       /**
42584        * <code>repeated .TableName table_names = 1;</code>
42585        */
addTableNames( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)42586       public Builder addTableNames(
42587           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
42588         if (tableNamesBuilder_ == null) {
42589           ensureTableNamesIsMutable();
42590           tableNames_.add(builderForValue.build());
42591           onChanged();
42592         } else {
42593           tableNamesBuilder_.addMessage(builderForValue.build());
42594         }
42595         return this;
42596       }
42597       /**
42598        * <code>repeated .TableName table_names = 1;</code>
42599        */
addTableNames( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)42600       public Builder addTableNames(
42601           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
42602         if (tableNamesBuilder_ == null) {
42603           ensureTableNamesIsMutable();
42604           tableNames_.add(index, builderForValue.build());
42605           onChanged();
42606         } else {
42607           tableNamesBuilder_.addMessage(index, builderForValue.build());
42608         }
42609         return this;
42610       }
42611       /**
42612        * <code>repeated .TableName table_names = 1;</code>
42613        */
addAllTableNames( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values)42614       public Builder addAllTableNames(
42615           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName> values) {
42616         if (tableNamesBuilder_ == null) {
42617           ensureTableNamesIsMutable();
42618           super.addAll(values, tableNames_);
42619           onChanged();
42620         } else {
42621           tableNamesBuilder_.addAllMessages(values);
42622         }
42623         return this;
42624       }
42625       /**
42626        * <code>repeated .TableName table_names = 1;</code>
42627        */
clearTableNames()42628       public Builder clearTableNames() {
42629         if (tableNamesBuilder_ == null) {
42630           tableNames_ = java.util.Collections.emptyList();
42631           bitField0_ = (bitField0_ & ~0x00000001);
42632           onChanged();
42633         } else {
42634           tableNamesBuilder_.clear();
42635         }
42636         return this;
42637       }
42638       /**
42639        * <code>repeated .TableName table_names = 1;</code>
42640        */
removeTableNames(int index)42641       public Builder removeTableNames(int index) {
42642         if (tableNamesBuilder_ == null) {
42643           ensureTableNamesIsMutable();
42644           tableNames_.remove(index);
42645           onChanged();
42646         } else {
42647           tableNamesBuilder_.remove(index);
42648         }
42649         return this;
42650       }
42651       /**
42652        * <code>repeated .TableName table_names = 1;</code>
42653        */
getTableNamesBuilder( int index)42654       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNamesBuilder(
42655           int index) {
42656         return getTableNamesFieldBuilder().getBuilder(index);
42657       }
42658       /**
42659        * <code>repeated .TableName table_names = 1;</code>
42660        */
getTableNamesOrBuilder( int index)42661       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNamesOrBuilder(
42662           int index) {
42663         if (tableNamesBuilder_ == null) {
42664           return tableNames_.get(index);  } else {
42665           return tableNamesBuilder_.getMessageOrBuilder(index);
42666         }
42667       }
42668       /**
42669        * <code>repeated .TableName table_names = 1;</code>
42670        */
42671       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNamesOrBuilderList()42672            getTableNamesOrBuilderList() {
42673         if (tableNamesBuilder_ != null) {
42674           return tableNamesBuilder_.getMessageOrBuilderList();
42675         } else {
42676           return java.util.Collections.unmodifiableList(tableNames_);
42677         }
42678       }
42679       /**
42680        * <code>repeated .TableName table_names = 1;</code>
42681        */
addTableNamesBuilder()42682       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder() {
42683         return getTableNamesFieldBuilder().addBuilder(
42684             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
42685       }
42686       /**
42687        * <code>repeated .TableName table_names = 1;</code>
42688        */
addTableNamesBuilder( int index)42689       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder addTableNamesBuilder(
42690           int index) {
42691         return getTableNamesFieldBuilder().addBuilder(
42692             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance());
42693       }
42694       /**
42695        * <code>repeated .TableName table_names = 1;</code>
42696        */
42697       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder>
getTableNamesBuilderList()42698            getTableNamesBuilderList() {
42699         return getTableNamesFieldBuilder().getBuilderList();
42700       }
      // Creates the nested field builder on first use, handing it the current
      // list (and whether that list is already a private mutable copy); the
      // plain list reference is nulled out because ownership moves to the builder.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNamesFieldBuilder() {
        if (tableNamesBuilder_ == null) {
          tableNamesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableNames_,
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          tableNames_ = null;
        }
        return tableNamesBuilder_;
      }
42715 
42716       // @@protoc_insertion_point(builder_scope:GetTableNamesResponse)
42717     }
42718 
    // Eagerly builds the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new GetTableNamesResponse(true);
      defaultInstance.initFields();
    }
42723 
42724     // @@protoc_insertion_point(class_scope:GetTableNamesResponse)
42725   }
42726 
  /**
   * Read interface for {@code GetClusterStatusRequest}; the message declares no
   * fields, so only the inherited {@link com.google.protobuf.MessageOrBuilder}
   * operations apply.
   */
  public interface GetClusterStatusRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
42730   /**
42731    * Protobuf type {@code GetClusterStatusRequest}
42732    */
42733   public static final class GetClusterStatusRequest extends
42734       com.google.protobuf.GeneratedMessage
42735       implements GetClusterStatusRequestOrBuilder {
42736     // Use GetClusterStatusRequest.newBuilder() to construct.
GetClusterStatusRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)42737     private GetClusterStatusRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
42738       super(builder);
42739       this.unknownFields = builder.getUnknownFields();
42740     }
GetClusterStatusRequest(boolean noInit)42741     private GetClusterStatusRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
42742 
42743     private static final GetClusterStatusRequest defaultInstance;
getDefaultInstance()42744     public static GetClusterStatusRequest getDefaultInstance() {
42745       return defaultInstance;
42746     }
42747 
getDefaultInstanceForType()42748     public GetClusterStatusRequest getDefaultInstanceForType() {
42749       return defaultInstance;
42750     }
42751 
42752     private final com.google.protobuf.UnknownFieldSet unknownFields;
42753     @java.lang.Override
42754     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()42755         getUnknownFields() {
42756       return this.unknownFields;
42757     }
GetClusterStatusRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42758     private GetClusterStatusRequest(
42759         com.google.protobuf.CodedInputStream input,
42760         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42761         throws com.google.protobuf.InvalidProtocolBufferException {
42762       initFields();
42763       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
42764           com.google.protobuf.UnknownFieldSet.newBuilder();
42765       try {
42766         boolean done = false;
42767         while (!done) {
42768           int tag = input.readTag();
42769           switch (tag) {
42770             case 0:
42771               done = true;
42772               break;
42773             default: {
42774               if (!parseUnknownField(input, unknownFields,
42775                                      extensionRegistry, tag)) {
42776                 done = true;
42777               }
42778               break;
42779             }
42780           }
42781         }
42782       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
42783         throw e.setUnfinishedMessage(this);
42784       } catch (java.io.IOException e) {
42785         throw new com.google.protobuf.InvalidProtocolBufferException(
42786             e.getMessage()).setUnfinishedMessage(this);
42787       } finally {
42788         this.unknownFields = unknownFields.build();
42789         makeExtensionsImmutable();
42790       }
42791     }
42792     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()42793         getDescriptor() {
42794       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_descriptor;
42795     }
42796 
42797     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()42798         internalGetFieldAccessorTable() {
42799       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable
42800           .ensureFieldAccessorsInitialized(
42801               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.Builder.class);
42802     }
42803 
42804     public static com.google.protobuf.Parser<GetClusterStatusRequest> PARSER =
42805         new com.google.protobuf.AbstractParser<GetClusterStatusRequest>() {
42806       public GetClusterStatusRequest parsePartialFrom(
42807           com.google.protobuf.CodedInputStream input,
42808           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42809           throws com.google.protobuf.InvalidProtocolBufferException {
42810         return new GetClusterStatusRequest(input, extensionRegistry);
42811       }
42812     };
42813 
42814     @java.lang.Override
getParserForType()42815     public com.google.protobuf.Parser<GetClusterStatusRequest> getParserForType() {
42816       return PARSER;
42817     }
42818 
initFields()42819     private void initFields() {
42820     }
42821     private byte memoizedIsInitialized = -1;
isInitialized()42822     public final boolean isInitialized() {
42823       byte isInitialized = memoizedIsInitialized;
42824       if (isInitialized != -1) return isInitialized == 1;
42825 
42826       memoizedIsInitialized = 1;
42827       return true;
42828     }
42829 
writeTo(com.google.protobuf.CodedOutputStream output)42830     public void writeTo(com.google.protobuf.CodedOutputStream output)
42831                         throws java.io.IOException {
42832       getSerializedSize();
42833       getUnknownFields().writeTo(output);
42834     }
42835 
42836     private int memoizedSerializedSize = -1;
getSerializedSize()42837     public int getSerializedSize() {
42838       int size = memoizedSerializedSize;
42839       if (size != -1) return size;
42840 
42841       size = 0;
42842       size += getUnknownFields().getSerializedSize();
42843       memoizedSerializedSize = size;
42844       return size;
42845     }
42846 
42847     private static final long serialVersionUID = 0L;
42848     @java.lang.Override
writeReplace()42849     protected java.lang.Object writeReplace()
42850         throws java.io.ObjectStreamException {
42851       return super.writeReplace();
42852     }
42853 
42854     @java.lang.Override
equals(final java.lang.Object obj)42855     public boolean equals(final java.lang.Object obj) {
42856       if (obj == this) {
42857        return true;
42858       }
42859       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)) {
42860         return super.equals(obj);
42861       }
42862       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest) obj;
42863 
42864       boolean result = true;
42865       result = result &&
42866           getUnknownFields().equals(other.getUnknownFields());
42867       return result;
42868     }
42869 
42870     private int memoizedHashCode = 0;
42871     @java.lang.Override
hashCode()42872     public int hashCode() {
42873       if (memoizedHashCode != 0) {
42874         return memoizedHashCode;
42875       }
42876       int hash = 41;
42877       hash = (19 * hash) + getDescriptorForType().hashCode();
42878       hash = (29 * hash) + getUnknownFields().hashCode();
42879       memoizedHashCode = hash;
42880       return hash;
42881     }
42882 
parseFrom( com.google.protobuf.ByteString data)42883     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(
42884         com.google.protobuf.ByteString data)
42885         throws com.google.protobuf.InvalidProtocolBufferException {
42886       return PARSER.parseFrom(data);
42887     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42888     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(
42889         com.google.protobuf.ByteString data,
42890         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42891         throws com.google.protobuf.InvalidProtocolBufferException {
42892       return PARSER.parseFrom(data, extensionRegistry);
42893     }
parseFrom(byte[] data)42894     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(byte[] data)
42895         throws com.google.protobuf.InvalidProtocolBufferException {
42896       return PARSER.parseFrom(data);
42897     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42898     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(
42899         byte[] data,
42900         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42901         throws com.google.protobuf.InvalidProtocolBufferException {
42902       return PARSER.parseFrom(data, extensionRegistry);
42903     }
parseFrom(java.io.InputStream input)42904     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(java.io.InputStream input)
42905         throws java.io.IOException {
42906       return PARSER.parseFrom(input);
42907     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42908     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(
42909         java.io.InputStream input,
42910         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42911         throws java.io.IOException {
42912       return PARSER.parseFrom(input, extensionRegistry);
42913     }
parseDelimitedFrom(java.io.InputStream input)42914     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseDelimitedFrom(java.io.InputStream input)
42915         throws java.io.IOException {
42916       return PARSER.parseDelimitedFrom(input);
42917     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42918     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseDelimitedFrom(
42919         java.io.InputStream input,
42920         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42921         throws java.io.IOException {
42922       return PARSER.parseDelimitedFrom(input, extensionRegistry);
42923     }
parseFrom( com.google.protobuf.CodedInputStream input)42924     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(
42925         com.google.protobuf.CodedInputStream input)
42926         throws java.io.IOException {
42927       return PARSER.parseFrom(input);
42928     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)42929     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parseFrom(
42930         com.google.protobuf.CodedInputStream input,
42931         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
42932         throws java.io.IOException {
42933       return PARSER.parseFrom(input, extensionRegistry);
42934     }
42935 
newBuilder()42936     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()42937     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest prototype)42938     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest prototype) {
42939       return newBuilder().mergeFrom(prototype);
42940     }
toBuilder()42941     public Builder toBuilder() { return newBuilder(this); }
42942 
42943     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)42944     protected Builder newBuilderForType(
42945         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
42946       Builder builder = new Builder(parent);
42947       return builder;
42948     }
42949     /**
42950      * Protobuf type {@code GetClusterStatusRequest}
42951      */
42952     public static final class Builder extends
42953         com.google.protobuf.GeneratedMessage.Builder<Builder>
42954        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequestOrBuilder {
42955       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()42956           getDescriptor() {
42957         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_descriptor;
42958       }
42959 
42960       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()42961           internalGetFieldAccessorTable() {
42962         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_fieldAccessorTable
42963             .ensureFieldAccessorsInitialized(
42964                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.Builder.class);
42965       }
42966 
42967       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.newBuilder()
Builder()42968       private Builder() {
42969         maybeForceBuilderInitialization();
42970       }
42971 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)42972       private Builder(
42973           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
42974         super(parent);
42975         maybeForceBuilderInitialization();
42976       }
maybeForceBuilderInitialization()42977       private void maybeForceBuilderInitialization() {
42978         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
42979         }
42980       }
create()42981       private static Builder create() {
42982         return new Builder();
42983       }
42984 
clear()42985       public Builder clear() {
42986         super.clear();
42987         return this;
42988       }
42989 
clone()42990       public Builder clone() {
42991         return create().mergeFrom(buildPartial());
42992       }
42993 
42994       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()42995           getDescriptorForType() {
42996         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusRequest_descriptor;
42997       }
42998 
getDefaultInstanceForType()42999       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest getDefaultInstanceForType() {
43000         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance();
43001       }
43002 
build()43003       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest build() {
43004         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest result = buildPartial();
43005         if (!result.isInitialized()) {
43006           throw newUninitializedMessageException(result);
43007         }
43008         return result;
43009       }
43010 
buildPartial()43011       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest buildPartial() {
43012         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest(this);
43013         onBuilt();
43014         return result;
43015       }
43016 
mergeFrom(com.google.protobuf.Message other)43017       public Builder mergeFrom(com.google.protobuf.Message other) {
43018         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest) {
43019           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)other);
43020         } else {
43021           super.mergeFrom(other);
43022           return this;
43023         }
43024       }
43025 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest other)43026       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest other) {
43027         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance()) return this;
43028         this.mergeUnknownFields(other.getUnknownFields());
43029         return this;
43030       }
43031 
isInitialized()43032       public final boolean isInitialized() {
43033         return true;
43034       }
43035 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43036       public Builder mergeFrom(
43037           com.google.protobuf.CodedInputStream input,
43038           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43039           throws java.io.IOException {
43040         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest parsedMessage = null;
43041         try {
43042           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
43043         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
43044           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest) e.getUnfinishedMessage();
43045           throw e;
43046         } finally {
43047           if (parsedMessage != null) {
43048             mergeFrom(parsedMessage);
43049           }
43050         }
43051         return this;
43052       }
43053 
43054       // @@protoc_insertion_point(builder_scope:GetClusterStatusRequest)
43055     }
43056 
43057     static {
43058       defaultInstance = new GetClusterStatusRequest(true);
defaultInstance.initFields()43059       defaultInstance.initFields();
43060     }
43061 
43062     // @@protoc_insertion_point(class_scope:GetClusterStatusRequest)
43063   }
43064 
  /**
   * Read interface for {@code GetClusterStatusResponse}; exposes the single
   * required {@code cluster_status} field.
   */
  public interface GetClusterStatusResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .ClusterStatus cluster_status = 1;
    /**
     * <code>required .ClusterStatus cluster_status = 1;</code>
     */
    boolean hasClusterStatus();
    /**
     * <code>required .ClusterStatus cluster_status = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus();
    /**
     * <code>required .ClusterStatus cluster_status = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder();
  }
43082   /**
43083    * Protobuf type {@code GetClusterStatusResponse}
43084    */
43085   public static final class GetClusterStatusResponse extends
43086       com.google.protobuf.GeneratedMessage
43087       implements GetClusterStatusResponseOrBuilder {
43088     // Use GetClusterStatusResponse.newBuilder() to construct.
GetClusterStatusResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)43089     private GetClusterStatusResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
43090       super(builder);
43091       this.unknownFields = builder.getUnknownFields();
43092     }
GetClusterStatusResponse(boolean noInit)43093     private GetClusterStatusResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
43094 
43095     private static final GetClusterStatusResponse defaultInstance;
getDefaultInstance()43096     public static GetClusterStatusResponse getDefaultInstance() {
43097       return defaultInstance;
43098     }
43099 
getDefaultInstanceForType()43100     public GetClusterStatusResponse getDefaultInstanceForType() {
43101       return defaultInstance;
43102     }
43103 
43104     private final com.google.protobuf.UnknownFieldSet unknownFields;
43105     @java.lang.Override
43106     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()43107         getUnknownFields() {
43108       return this.unknownFields;
43109     }
GetClusterStatusResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43110     private GetClusterStatusResponse(
43111         com.google.protobuf.CodedInputStream input,
43112         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43113         throws com.google.protobuf.InvalidProtocolBufferException {
43114       initFields();
43115       int mutable_bitField0_ = 0;
43116       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
43117           com.google.protobuf.UnknownFieldSet.newBuilder();
43118       try {
43119         boolean done = false;
43120         while (!done) {
43121           int tag = input.readTag();
43122           switch (tag) {
43123             case 0:
43124               done = true;
43125               break;
43126             default: {
43127               if (!parseUnknownField(input, unknownFields,
43128                                      extensionRegistry, tag)) {
43129                 done = true;
43130               }
43131               break;
43132             }
43133             case 10: {
43134               org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder subBuilder = null;
43135               if (((bitField0_ & 0x00000001) == 0x00000001)) {
43136                 subBuilder = clusterStatus_.toBuilder();
43137               }
43138               clusterStatus_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.PARSER, extensionRegistry);
43139               if (subBuilder != null) {
43140                 subBuilder.mergeFrom(clusterStatus_);
43141                 clusterStatus_ = subBuilder.buildPartial();
43142               }
43143               bitField0_ |= 0x00000001;
43144               break;
43145             }
43146           }
43147         }
43148       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
43149         throw e.setUnfinishedMessage(this);
43150       } catch (java.io.IOException e) {
43151         throw new com.google.protobuf.InvalidProtocolBufferException(
43152             e.getMessage()).setUnfinishedMessage(this);
43153       } finally {
43154         this.unknownFields = unknownFields.build();
43155         makeExtensionsImmutable();
43156       }
43157     }
43158     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()43159         getDescriptor() {
43160       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_descriptor;
43161     }
43162 
43163     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()43164         internalGetFieldAccessorTable() {
43165       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable
43166           .ensureFieldAccessorsInitialized(
43167               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.Builder.class);
43168     }
43169 
43170     public static com.google.protobuf.Parser<GetClusterStatusResponse> PARSER =
43171         new com.google.protobuf.AbstractParser<GetClusterStatusResponse>() {
43172       public GetClusterStatusResponse parsePartialFrom(
43173           com.google.protobuf.CodedInputStream input,
43174           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43175           throws com.google.protobuf.InvalidProtocolBufferException {
43176         return new GetClusterStatusResponse(input, extensionRegistry);
43177       }
43178     };
43179 
43180     @java.lang.Override
getParserForType()43181     public com.google.protobuf.Parser<GetClusterStatusResponse> getParserForType() {
43182       return PARSER;
43183     }
43184 
43185     private int bitField0_;
43186     // required .ClusterStatus cluster_status = 1;
43187     public static final int CLUSTER_STATUS_FIELD_NUMBER = 1;
43188     private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_;
43189     /**
43190      * <code>required .ClusterStatus cluster_status = 1;</code>
43191      */
hasClusterStatus()43192     public boolean hasClusterStatus() {
43193       return ((bitField0_ & 0x00000001) == 0x00000001);
43194     }
43195     /**
43196      * <code>required .ClusterStatus cluster_status = 1;</code>
43197      */
getClusterStatus()43198     public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() {
43199       return clusterStatus_;
43200     }
43201     /**
43202      * <code>required .ClusterStatus cluster_status = 1;</code>
43203      */
getClusterStatusOrBuilder()43204     public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() {
43205       return clusterStatus_;
43206     }
43207 
initFields()43208     private void initFields() {
43209       clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance();
43210     }
43211     private byte memoizedIsInitialized = -1;
isInitialized()43212     public final boolean isInitialized() {
43213       byte isInitialized = memoizedIsInitialized;
43214       if (isInitialized != -1) return isInitialized == 1;
43215 
43216       if (!hasClusterStatus()) {
43217         memoizedIsInitialized = 0;
43218         return false;
43219       }
43220       if (!getClusterStatus().isInitialized()) {
43221         memoizedIsInitialized = 0;
43222         return false;
43223       }
43224       memoizedIsInitialized = 1;
43225       return true;
43226     }
43227 
writeTo(com.google.protobuf.CodedOutputStream output)43228     public void writeTo(com.google.protobuf.CodedOutputStream output)
43229                         throws java.io.IOException {
43230       getSerializedSize();
43231       if (((bitField0_ & 0x00000001) == 0x00000001)) {
43232         output.writeMessage(1, clusterStatus_);
43233       }
43234       getUnknownFields().writeTo(output);
43235     }
43236 
43237     private int memoizedSerializedSize = -1;
getSerializedSize()43238     public int getSerializedSize() {
43239       int size = memoizedSerializedSize;
43240       if (size != -1) return size;
43241 
43242       size = 0;
43243       if (((bitField0_ & 0x00000001) == 0x00000001)) {
43244         size += com.google.protobuf.CodedOutputStream
43245           .computeMessageSize(1, clusterStatus_);
43246       }
43247       size += getUnknownFields().getSerializedSize();
43248       memoizedSerializedSize = size;
43249       return size;
43250     }
43251 
43252     private static final long serialVersionUID = 0L;
43253     @java.lang.Override
writeReplace()43254     protected java.lang.Object writeReplace()
43255         throws java.io.ObjectStreamException {
43256       return super.writeReplace();
43257     }
43258 
43259     @java.lang.Override
equals(final java.lang.Object obj)43260     public boolean equals(final java.lang.Object obj) {
43261       if (obj == this) {
43262        return true;
43263       }
43264       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse)) {
43265         return super.equals(obj);
43266       }
43267       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) obj;
43268 
43269       boolean result = true;
43270       result = result && (hasClusterStatus() == other.hasClusterStatus());
43271       if (hasClusterStatus()) {
43272         result = result && getClusterStatus()
43273             .equals(other.getClusterStatus());
43274       }
43275       result = result &&
43276           getUnknownFields().equals(other.getUnknownFields());
43277       return result;
43278     }
43279 
43280     private int memoizedHashCode = 0;
43281     @java.lang.Override
hashCode()43282     public int hashCode() {
43283       if (memoizedHashCode != 0) {
43284         return memoizedHashCode;
43285       }
43286       int hash = 41;
43287       hash = (19 * hash) + getDescriptorForType().hashCode();
43288       if (hasClusterStatus()) {
43289         hash = (37 * hash) + CLUSTER_STATUS_FIELD_NUMBER;
43290         hash = (53 * hash) + getClusterStatus().hashCode();
43291       }
43292       hash = (29 * hash) + getUnknownFields().hashCode();
43293       memoizedHashCode = hash;
43294       return hash;
43295     }
43296 
parseFrom( com.google.protobuf.ByteString data)43297     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(
43298         com.google.protobuf.ByteString data)
43299         throws com.google.protobuf.InvalidProtocolBufferException {
43300       return PARSER.parseFrom(data);
43301     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43302     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(
43303         com.google.protobuf.ByteString data,
43304         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43305         throws com.google.protobuf.InvalidProtocolBufferException {
43306       return PARSER.parseFrom(data, extensionRegistry);
43307     }
parseFrom(byte[] data)43308     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(byte[] data)
43309         throws com.google.protobuf.InvalidProtocolBufferException {
43310       return PARSER.parseFrom(data);
43311     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43312     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(
43313         byte[] data,
43314         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43315         throws com.google.protobuf.InvalidProtocolBufferException {
43316       return PARSER.parseFrom(data, extensionRegistry);
43317     }
parseFrom(java.io.InputStream input)43318     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(java.io.InputStream input)
43319         throws java.io.IOException {
43320       return PARSER.parseFrom(input);
43321     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43322     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(
43323         java.io.InputStream input,
43324         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43325         throws java.io.IOException {
43326       return PARSER.parseFrom(input, extensionRegistry);
43327     }
parseDelimitedFrom(java.io.InputStream input)43328     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseDelimitedFrom(java.io.InputStream input)
43329         throws java.io.IOException {
43330       return PARSER.parseDelimitedFrom(input);
43331     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43332     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseDelimitedFrom(
43333         java.io.InputStream input,
43334         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43335         throws java.io.IOException {
43336       return PARSER.parseDelimitedFrom(input, extensionRegistry);
43337     }
parseFrom( com.google.protobuf.CodedInputStream input)43338     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(
43339         com.google.protobuf.CodedInputStream input)
43340         throws java.io.IOException {
43341       return PARSER.parseFrom(input);
43342     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43343     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parseFrom(
43344         com.google.protobuf.CodedInputStream input,
43345         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43346         throws java.io.IOException {
43347       return PARSER.parseFrom(input, extensionRegistry);
43348     }
43349 
newBuilder()43350     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()43351     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse prototype)43352     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse prototype) {
43353       return newBuilder().mergeFrom(prototype);
43354     }
toBuilder()43355     public Builder toBuilder() { return newBuilder(this); }
43356 
43357     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)43358     protected Builder newBuilderForType(
43359         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
43360       Builder builder = new Builder(parent);
43361       return builder;
43362     }
43363     /**
43364      * Protobuf type {@code GetClusterStatusResponse}
43365      */
43366     public static final class Builder extends
43367         com.google.protobuf.GeneratedMessage.Builder<Builder>
43368        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponseOrBuilder {
43369       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()43370           getDescriptor() {
43371         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_descriptor;
43372       }
43373 
43374       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()43375           internalGetFieldAccessorTable() {
43376         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_fieldAccessorTable
43377             .ensureFieldAccessorsInitialized(
43378                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.Builder.class);
43379       }
43380 
43381       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.newBuilder()
Builder()43382       private Builder() {
43383         maybeForceBuilderInitialization();
43384       }
43385 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)43386       private Builder(
43387           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
43388         super(parent);
43389         maybeForceBuilderInitialization();
43390       }
maybeForceBuilderInitialization()43391       private void maybeForceBuilderInitialization() {
43392         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
43393           getClusterStatusFieldBuilder();
43394         }
43395       }
create()43396       private static Builder create() {
43397         return new Builder();
43398       }
43399 
clear()43400       public Builder clear() {
43401         super.clear();
43402         if (clusterStatusBuilder_ == null) {
43403           clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance();
43404         } else {
43405           clusterStatusBuilder_.clear();
43406         }
43407         bitField0_ = (bitField0_ & ~0x00000001);
43408         return this;
43409       }
43410 
clone()43411       public Builder clone() {
43412         return create().mergeFrom(buildPartial());
43413       }
43414 
43415       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()43416           getDescriptorForType() {
43417         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetClusterStatusResponse_descriptor;
43418       }
43419 
getDefaultInstanceForType()43420       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse getDefaultInstanceForType() {
43421         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance();
43422       }
43423 
build()43424       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse build() {
43425         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse result = buildPartial();
43426         if (!result.isInitialized()) {
43427           throw newUninitializedMessageException(result);
43428         }
43429         return result;
43430       }
43431 
buildPartial()43432       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse buildPartial() {
43433         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse(this);
43434         int from_bitField0_ = bitField0_;
43435         int to_bitField0_ = 0;
43436         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
43437           to_bitField0_ |= 0x00000001;
43438         }
43439         if (clusterStatusBuilder_ == null) {
43440           result.clusterStatus_ = clusterStatus_;
43441         } else {
43442           result.clusterStatus_ = clusterStatusBuilder_.build();
43443         }
43444         result.bitField0_ = to_bitField0_;
43445         onBuilt();
43446         return result;
43447       }
43448 
mergeFrom(com.google.protobuf.Message other)43449       public Builder mergeFrom(com.google.protobuf.Message other) {
43450         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) {
43451           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse)other);
43452         } else {
43453           super.mergeFrom(other);
43454           return this;
43455         }
43456       }
43457 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse other)43458       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse other) {
43459         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance()) return this;
43460         if (other.hasClusterStatus()) {
43461           mergeClusterStatus(other.getClusterStatus());
43462         }
43463         this.mergeUnknownFields(other.getUnknownFields());
43464         return this;
43465       }
43466 
isInitialized()43467       public final boolean isInitialized() {
43468         if (!hasClusterStatus()) {
43469 
43470           return false;
43471         }
43472         if (!getClusterStatus().isInitialized()) {
43473 
43474           return false;
43475         }
43476         return true;
43477       }
43478 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)43479       public Builder mergeFrom(
43480           com.google.protobuf.CodedInputStream input,
43481           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
43482           throws java.io.IOException {
43483         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse parsedMessage = null;
43484         try {
43485           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
43486         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
43487           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) e.getUnfinishedMessage();
43488           throw e;
43489         } finally {
43490           if (parsedMessage != null) {
43491             mergeFrom(parsedMessage);
43492           }
43493         }
43494         return this;
43495       }
43496       private int bitField0_;
43497 
43498       // required .ClusterStatus cluster_status = 1;
43499       private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance();
43500       private com.google.protobuf.SingleFieldBuilder<
43501           org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder> clusterStatusBuilder_;
43502       /**
43503        * <code>required .ClusterStatus cluster_status = 1;</code>
43504        */
hasClusterStatus()43505       public boolean hasClusterStatus() {
43506         return ((bitField0_ & 0x00000001) == 0x00000001);
43507       }
43508       /**
43509        * <code>required .ClusterStatus cluster_status = 1;</code>
43510        */
getClusterStatus()43511       public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getClusterStatus() {
43512         if (clusterStatusBuilder_ == null) {
43513           return clusterStatus_;
43514         } else {
43515           return clusterStatusBuilder_.getMessage();
43516         }
43517       }
43518       /**
43519        * <code>required .ClusterStatus cluster_status = 1;</code>
43520        */
setClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value)43521       public Builder setClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) {
43522         if (clusterStatusBuilder_ == null) {
43523           if (value == null) {
43524             throw new NullPointerException();
43525           }
43526           clusterStatus_ = value;
43527           onChanged();
43528         } else {
43529           clusterStatusBuilder_.setMessage(value);
43530         }
43531         bitField0_ |= 0x00000001;
43532         return this;
43533       }
43534       /**
43535        * <code>required .ClusterStatus cluster_status = 1;</code>
43536        */
setClusterStatus( org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder builderForValue)43537       public Builder setClusterStatus(
43538           org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder builderForValue) {
43539         if (clusterStatusBuilder_ == null) {
43540           clusterStatus_ = builderForValue.build();
43541           onChanged();
43542         } else {
43543           clusterStatusBuilder_.setMessage(builderForValue.build());
43544         }
43545         bitField0_ |= 0x00000001;
43546         return this;
43547       }
43548       /**
43549        * <code>required .ClusterStatus cluster_status = 1;</code>
43550        */
mergeClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value)43551       public Builder mergeClusterStatus(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus value) {
43552         if (clusterStatusBuilder_ == null) {
43553           if (((bitField0_ & 0x00000001) == 0x00000001) &&
43554               clusterStatus_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) {
43555             clusterStatus_ =
43556               org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder(clusterStatus_).mergeFrom(value).buildPartial();
43557           } else {
43558             clusterStatus_ = value;
43559           }
43560           onChanged();
43561         } else {
43562           clusterStatusBuilder_.mergeFrom(value);
43563         }
43564         bitField0_ |= 0x00000001;
43565         return this;
43566       }
43567       /**
43568        * <code>required .ClusterStatus cluster_status = 1;</code>
43569        */
clearClusterStatus()43570       public Builder clearClusterStatus() {
43571         if (clusterStatusBuilder_ == null) {
43572           clusterStatus_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance();
43573           onChanged();
43574         } else {
43575           clusterStatusBuilder_.clear();
43576         }
43577         bitField0_ = (bitField0_ & ~0x00000001);
43578         return this;
43579       }
43580       /**
43581        * <code>required .ClusterStatus cluster_status = 1;</code>
43582        */
getClusterStatusBuilder()43583       public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder getClusterStatusBuilder() {
43584         bitField0_ |= 0x00000001;
43585         onChanged();
43586         return getClusterStatusFieldBuilder().getBuilder();
43587       }
43588       /**
43589        * <code>required .ClusterStatus cluster_status = 1;</code>
43590        */
getClusterStatusOrBuilder()43591       public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder getClusterStatusOrBuilder() {
43592         if (clusterStatusBuilder_ != null) {
43593           return clusterStatusBuilder_.getMessageOrBuilder();
43594         } else {
43595           return clusterStatus_;
43596         }
43597       }
43598       /**
43599        * <code>required .ClusterStatus cluster_status = 1;</code>
43600        */
43601       private com.google.protobuf.SingleFieldBuilder<
43602           org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder>
getClusterStatusFieldBuilder()43603           getClusterStatusFieldBuilder() {
43604         if (clusterStatusBuilder_ == null) {
43605           clusterStatusBuilder_ = new com.google.protobuf.SingleFieldBuilder<
43606               org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder>(
43607                   clusterStatus_,
43608                   getParentForChildren(),
43609                   isClean());
43610           clusterStatus_ = null;
43611         }
43612         return clusterStatusBuilder_;
43613       }
43614 
43615       // @@protoc_insertion_point(builder_scope:GetClusterStatusResponse)
43616     }
43617 
43618     static {
43619       defaultInstance = new GetClusterStatusResponse(true);
defaultInstance.initFields()43620       defaultInstance.initFields();
43621     }
43622 
43623     // @@protoc_insertion_point(class_scope:GetClusterStatusResponse)
43624   }
43625 
43626   public interface IsMasterRunningRequestOrBuilder
43627       extends com.google.protobuf.MessageOrBuilder {
43628   }
43629   /**
43630    * Protobuf type {@code IsMasterRunningRequest}
43631    */
43632   public static final class IsMasterRunningRequest extends
43633       com.google.protobuf.GeneratedMessage
43634       implements IsMasterRunningRequestOrBuilder {
43635     // Use IsMasterRunningRequest.newBuilder() to construct.
IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)43636     private IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
43637       super(builder);
43638       this.unknownFields = builder.getUnknownFields();
43639     }
IsMasterRunningRequest(boolean noInit)43640     private IsMasterRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
43641 
43642     private static final IsMasterRunningRequest defaultInstance;
getDefaultInstance()43643     public static IsMasterRunningRequest getDefaultInstance() {
43644       return defaultInstance;
43645     }
43646 
getDefaultInstanceForType()43647     public IsMasterRunningRequest getDefaultInstanceForType() {
43648       return defaultInstance;
43649     }
43650 
43651     private final com.google.protobuf.UnknownFieldSet unknownFields;
43652     @java.lang.Override
43653     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()43654         getUnknownFields() {
43655       return this.unknownFields;
43656     }
    // Wire-format parsing constructor. This message declares no fields, so
    // every non-zero tag is routed to the unknown-field set; tag 0 means
    // end-of-stream. Invoked only via PARSER.parsePartialFrom.
    private IsMasterRunningRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // parseUnknownField returns false on an end-group tag.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on failure, so the
        // unfinished message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for the {@code IsMasterRunningRequest} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
    }

    // Parser that delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<IsMasterRunningRequest> PARSER =
        new com.google.protobuf.AbstractParser<IsMasterRunningRequest>() {
      public IsMasterRunningRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsMasterRunningRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsMasterRunningRequest> getParserForType() {
      return PARSER;
    }
43717 
    // No declared fields, so nothing to reset.
    private void initFields() {
    }
    // Memoized isInitialized result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /** Always initialized: the message has no required fields. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Force size computation first (protoc convention) then emit only the
      // unknown fields, since no fields are declared.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is delegated to GeneratedMessage's proxy form.
      return super.writeReplace();
    }
43752 
43753     @java.lang.Override
equals(final java.lang.Object obj)43754     public boolean equals(final java.lang.Object obj) {
43755       if (obj == this) {
43756        return true;
43757       }
43758       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)) {
43759         return super.equals(obj);
43760       }
43761       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) obj;
43762 
43763       boolean result = true;
43764       result = result &&
43765           getUnknownFields().equals(other.getUnknownFields());
43766       return result;
43767     }
43768 
    // Memoized hash; 0 = not yet computed (a genuinely-zero hash recomputes,
    // which is harmless because the message is immutable).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
43781 
    // Static parse entry points; all overloads delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
43834 
    /** Returns a fresh builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with {@code prototype}'s contents. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally for nested-builder support.
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code IsMasterRunningRequest}
     *
     * <p>Builder for the (field-less) request message; state consists solely
     * of inherited unknown fields.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message/group fields, so nothing to eagerly initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        // Deep copy via round-trip through a partial message.
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance();
      }

      /** Builds the message; throws if required fields are missing (none here). */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)other);
        } else {
          // Fall back to reflective field-by-field merge.
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge whatever was parsed before a failure, then rethrow.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:IsMasterRunningRequest)
    }
43955 
    static {
      // Create and initialize the singleton default instance via the
      // no-parse constructor.
      defaultInstance = new IsMasterRunningRequest(true);
      defaultInstance.initFields();
    }
43960 
43961     // @@protoc_insertion_point(class_scope:IsMasterRunningRequest)
43962   }
43963 
  /**
   * Read-only accessors shared by {@code IsMasterRunningResponse} and its
   * builder; the message carries a single required bool.
   */
  public interface IsMasterRunningResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bool is_master_running = 1;
    /**
     * <code>required bool is_master_running = 1;</code>
     */
    boolean hasIsMasterRunning();
    /**
     * <code>required bool is_master_running = 1;</code>
     */
    boolean getIsMasterRunning();
  }
43977   /**
43978    * Protobuf type {@code IsMasterRunningResponse}
43979    */
43980   public static final class IsMasterRunningResponse extends
43981       com.google.protobuf.GeneratedMessage
43982       implements IsMasterRunningResponseOrBuilder {
43983     // Use IsMasterRunningResponse.newBuilder() to construct.
    // Use IsMasterRunningResponse.newBuilder() to construct.
    private IsMasterRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // "noInit" constructor: builds the shared default instance without parsing.
    private IsMasterRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, initialized in the class static block.
    private static final IsMasterRunningResponse defaultInstance;
    /** Returns the shared immutable default instance of this message. */
    public static IsMasterRunningResponse getDefaultInstance() {
      return defaultInstance;
    }

    public IsMasterRunningResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
43998 
    // Fields seen on the wire that are not declared in the .proto schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
IsMasterRunningResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44005     private IsMasterRunningResponse(
44006         com.google.protobuf.CodedInputStream input,
44007         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44008         throws com.google.protobuf.InvalidProtocolBufferException {
44009       initFields();
44010       int mutable_bitField0_ = 0;
44011       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
44012           com.google.protobuf.UnknownFieldSet.newBuilder();
44013       try {
44014         boolean done = false;
44015         while (!done) {
44016           int tag = input.readTag();
44017           switch (tag) {
44018             case 0:
44019               done = true;
44020               break;
44021             default: {
44022               if (!parseUnknownField(input, unknownFields,
44023                                      extensionRegistry, tag)) {
44024                 done = true;
44025               }
44026               break;
44027             }
44028             case 8: {
44029               bitField0_ |= 0x00000001;
44030               isMasterRunning_ = input.readBool();
44031               break;
44032             }
44033           }
44034         }
44035       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
44036         throw e.setUnfinishedMessage(this);
44037       } catch (java.io.IOException e) {
44038         throw new com.google.protobuf.InvalidProtocolBufferException(
44039             e.getMessage()).setUnfinishedMessage(this);
44040       } finally {
44041         this.unknownFields = unknownFields.build();
44042         makeExtensionsImmutable();
44043       }
44044     }
    /** Descriptor for the {@code IsMasterRunningResponse} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
    }

    // Parser that delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<IsMasterRunningResponse> PARSER =
        new com.google.protobuf.AbstractParser<IsMasterRunningResponse>() {
      public IsMasterRunningResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsMasterRunningResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsMasterRunningResponse> getParserForType() {
      return PARSER;
    }
44071 
    // Presence bitmask; bit 0 tracks is_master_running.
    private int bitField0_;
    // required bool is_master_running = 1;
    public static final int IS_MASTER_RUNNING_FIELD_NUMBER = 1;
    private boolean isMasterRunning_;
    /**
     * <code>required bool is_master_running = 1;</code>
     */
    public boolean hasIsMasterRunning() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bool is_master_running = 1;</code>
     */
    public boolean getIsMasterRunning() {
      return isMasterRunning_;
    }

    // Reset declared fields to their proto defaults.
    private void initFields() {
      isMasterRunning_ = false;
    }
    // Memoized isInitialized result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
isInitialized()44093     public final boolean isInitialized() {
44094       byte isInitialized = memoizedIsInitialized;
44095       if (isInitialized != -1) return isInitialized == 1;
44096 
44097       if (!hasIsMasterRunning()) {
44098         memoizedIsInitialized = 0;
44099         return false;
44100       }
44101       memoizedIsInitialized = 1;
44102       return true;
44103     }
44104 
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Force size computation first (protoc convention).
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, isMasterRunning_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size; -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, isMasterRunning_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is delegated to GeneratedMessage's proxy form.
      return super.writeReplace();
    }
44135 
44136     @java.lang.Override
equals(final java.lang.Object obj)44137     public boolean equals(final java.lang.Object obj) {
44138       if (obj == this) {
44139        return true;
44140       }
44141       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)) {
44142         return super.equals(obj);
44143       }
44144       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) obj;
44145 
44146       boolean result = true;
44147       result = result && (hasIsMasterRunning() == other.hasIsMasterRunning());
44148       if (hasIsMasterRunning()) {
44149         result = result && (getIsMasterRunning()
44150             == other.getIsMasterRunning());
44151       }
44152       result = result &&
44153           getUnknownFields().equals(other.getUnknownFields());
44154       return result;
44155     }
44156 
    // Memoized hash; 0 = not yet computed (a genuinely-zero hash recomputes,
    // which is harmless because the message is immutable).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIsMasterRunning()) {
        hash = (37 * hash) + IS_MASTER_RUNNING_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsMasterRunning());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
44173 
    // Static parse entry points; all overloads delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
44226 
    /** Returns a fresh builder for this message type. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated with {@code prototype}'s contents. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally for nested-builder support.
      Builder builder = new Builder(parent);
      return builder;
    }
44240     /**
44241      * Protobuf type {@code IsMasterRunningResponse}
44242      */
44243     public static final class Builder extends
44244         com.google.protobuf.GeneratedMessage.Builder<Builder>
44245        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder {
      /** Descriptor for the {@code IsMasterRunningResponse} message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message/group fields, so nothing to eagerly initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets is_master_running to its default and clears its presence bit. */
      public Builder clear() {
        super.clear();
        isMasterRunning_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        // Deep copy via round-trip through a partial message.
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance();
      }
44295 
build()44296       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse build() {
44297         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial();
44298         if (!result.isInitialized()) {
44299           throw newUninitializedMessageException(result);
44300         }
44301         return result;
44302       }
44303 
buildPartial()44304       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() {
44305         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this);
44306         int from_bitField0_ = bitField0_;
44307         int to_bitField0_ = 0;
44308         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
44309           to_bitField0_ |= 0x00000001;
44310         }
44311         result.isMasterRunning_ = isMasterRunning_;
44312         result.bitField0_ = to_bitField0_;
44313         onBuilt();
44314         return result;
44315       }
44316 
mergeFrom(com.google.protobuf.Message other)44317       public Builder mergeFrom(com.google.protobuf.Message other) {
44318         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) {
44319           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse)other);
44320         } else {
44321           super.mergeFrom(other);
44322           return this;
44323         }
44324       }
44325 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other)44326       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse other) {
44327         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()) return this;
44328         if (other.hasIsMasterRunning()) {
44329           setIsMasterRunning(other.getIsMasterRunning());
44330         }
44331         this.mergeUnknownFields(other.getUnknownFields());
44332         return this;
44333       }
44334 
isInitialized()44335       public final boolean isInitialized() {
44336         if (!hasIsMasterRunning()) {
44337 
44338           return false;
44339         }
44340         return true;
44341       }
44342 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44343       public Builder mergeFrom(
44344           com.google.protobuf.CodedInputStream input,
44345           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44346           throws java.io.IOException {
44347         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parsedMessage = null;
44348         try {
44349           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
44350         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
44351           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) e.getUnfinishedMessage();
44352           throw e;
44353         } finally {
44354           if (parsedMessage != null) {
44355             mergeFrom(parsedMessage);
44356           }
44357         }
44358         return this;
44359       }
      // Presence bits for optional/required fields; bit 0 tracks
      // is_master_running.
      private int bitField0_;

      // required bool is_master_running = 1;
      private boolean isMasterRunning_ ;
44364       /**
44365        * <code>required bool is_master_running = 1;</code>
44366        */
hasIsMasterRunning()44367       public boolean hasIsMasterRunning() {
44368         return ((bitField0_ & 0x00000001) == 0x00000001);
44369       }
44370       /**
44371        * <code>required bool is_master_running = 1;</code>
44372        */
getIsMasterRunning()44373       public boolean getIsMasterRunning() {
44374         return isMasterRunning_;
44375       }
44376       /**
44377        * <code>required bool is_master_running = 1;</code>
44378        */
setIsMasterRunning(boolean value)44379       public Builder setIsMasterRunning(boolean value) {
44380         bitField0_ |= 0x00000001;
44381         isMasterRunning_ = value;
44382         onChanged();
44383         return this;
44384       }
44385       /**
44386        * <code>required bool is_master_running = 1;</code>
44387        */
clearIsMasterRunning()44388       public Builder clearIsMasterRunning() {
44389         bitField0_ = (bitField0_ & ~0x00000001);
44390         isMasterRunning_ = false;
44391         onChanged();
44392         return this;
44393       }
44394 
44395       // @@protoc_insertion_point(builder_scope:IsMasterRunningResponse)
44396     }
44397 
    // Class initializer: creates and initializes the shared default instance.
    static {
      defaultInstance = new IsMasterRunningResponse(true);
      defaultInstance.initFields();
    }
44402 
44403     // @@protoc_insertion_point(class_scope:IsMasterRunningResponse)
44404   }
44405 
  // Read-only accessor interface implemented by both ExecProcedureRequest
  // and its Builder.
  public interface ExecProcedureRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .ProcedureDescription procedure = 1;
    /**
     * <code>required .ProcedureDescription procedure = 1;</code>
     */
    boolean hasProcedure();
    /**
     * <code>required .ProcedureDescription procedure = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure();
    /**
     * <code>required .ProcedureDescription procedure = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder();
  }
44423   /**
44424    * Protobuf type {@code ExecProcedureRequest}
44425    */
44426   public static final class ExecProcedureRequest extends
44427       com.google.protobuf.GeneratedMessage
44428       implements ExecProcedureRequestOrBuilder {
    // Use ExecProcedureRequest.newBuilder() to construct.
    // Builder-based constructor; adopts the builder's unknown fields.
    private ExecProcedureRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
ExecProcedureRequest(boolean noInit)44434     private ExecProcedureRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
44435 
    // Shared immutable default instance, created in the static initializer.
    private static final ExecProcedureRequest defaultInstance;
    public static ExecProcedureRequest getDefaultInstance() {
      return defaultInstance;
    }
44440 
getDefaultInstanceForType()44441     public ExecProcedureRequest getDefaultInstanceForType() {
44442       return defaultInstance;
44443     }
44444 
    // Fields read from the wire that are not defined in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
ExecProcedureRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44451     private ExecProcedureRequest(
44452         com.google.protobuf.CodedInputStream input,
44453         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44454         throws com.google.protobuf.InvalidProtocolBufferException {
44455       initFields();
44456       int mutable_bitField0_ = 0;
44457       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
44458           com.google.protobuf.UnknownFieldSet.newBuilder();
44459       try {
44460         boolean done = false;
44461         while (!done) {
44462           int tag = input.readTag();
44463           switch (tag) {
44464             case 0:
44465               done = true;
44466               break;
44467             default: {
44468               if (!parseUnknownField(input, unknownFields,
44469                                      extensionRegistry, tag)) {
44470                 done = true;
44471               }
44472               break;
44473             }
44474             case 10: {
44475               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder subBuilder = null;
44476               if (((bitField0_ & 0x00000001) == 0x00000001)) {
44477                 subBuilder = procedure_.toBuilder();
44478               }
44479               procedure_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.PARSER, extensionRegistry);
44480               if (subBuilder != null) {
44481                 subBuilder.mergeFrom(procedure_);
44482                 procedure_ = subBuilder.buildPartial();
44483               }
44484               bitField0_ |= 0x00000001;
44485               break;
44486             }
44487           }
44488         }
44489       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
44490         throw e.setUnfinishedMessage(this);
44491       } catch (java.io.IOException e) {
44492         throw new com.google.protobuf.InvalidProtocolBufferException(
44493             e.getMessage()).setUnfinishedMessage(this);
44494       } finally {
44495         this.unknownFields = unknownFields.build();
44496         makeExtensionsImmutable();
44497       }
44498     }
    // Static accessor for this message type's protobuf descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_descriptor;
    }
44503 
    // Supplies the reflection table mapping descriptor fields to accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.Builder.class);
    }
44510 
    // Parser singleton; delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<ExecProcedureRequest> PARSER =
        new com.google.protobuf.AbstractParser<ExecProcedureRequest>() {
      public ExecProcedureRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ExecProcedureRequest(input, extensionRegistry);
      }
    };
44520 
    @java.lang.Override
    public com.google.protobuf.Parser<ExecProcedureRequest> getParserForType() {
      return PARSER;
    }
44525 
    // Presence bits; bit 0 tracks the required `procedure` field.
    private int bitField0_;
    // required .ProcedureDescription procedure = 1;
    public static final int PROCEDURE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_;
44530     /**
44531      * <code>required .ProcedureDescription procedure = 1;</code>
44532      */
hasProcedure()44533     public boolean hasProcedure() {
44534       return ((bitField0_ & 0x00000001) == 0x00000001);
44535     }
44536     /**
44537      * <code>required .ProcedureDescription procedure = 1;</code>
44538      */
getProcedure()44539     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() {
44540       return procedure_;
44541     }
44542     /**
44543      * <code>required .ProcedureDescription procedure = 1;</code>
44544      */
getProcedureOrBuilder()44545     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() {
44546       return procedure_;
44547     }
44548 
initFields()44549     private void initFields() {
44550       procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
44551     }
    // Memoized initialization state: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Required field must be present...
      if (!hasProcedure()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // ...and itself fully initialized (it is a message with required fields).
      if (!getProcedure().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
44568 
writeTo(com.google.protobuf.CodedOutputStream output)44569     public void writeTo(com.google.protobuf.CodedOutputStream output)
44570                         throws java.io.IOException {
44571       getSerializedSize();
44572       if (((bitField0_ & 0x00000001) == 0x00000001)) {
44573         output.writeMessage(1, procedure_);
44574       }
44575       getUnknownFields().writeTo(output);
44576     }
44577 
    // Cached wire size; -1 means not yet computed. Safe to cache because the
    // message is immutable once built.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, procedure_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
44592 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
44599 
    // Value equality: same presence, equal `procedure`, equal unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest) obj;

      boolean result = true;
      result = result && (hasProcedure() == other.hasProcedure());
      if (hasProcedure()) {
        result = result && getProcedure()
            .equals(other.getProcedure());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
44620 
    // Cached hash; 0 means not yet computed (consistent with equals above).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcedure()) {
        hash = (37 * hash) + PROCEDURE_FIELD_NUMBER;
        hash = (53 * hash) + getProcedure().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
44637 
parseFrom( com.google.protobuf.ByteString data)44638     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(
44639         com.google.protobuf.ByteString data)
44640         throws com.google.protobuf.InvalidProtocolBufferException {
44641       return PARSER.parseFrom(data);
44642     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44643     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(
44644         com.google.protobuf.ByteString data,
44645         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44646         throws com.google.protobuf.InvalidProtocolBufferException {
44647       return PARSER.parseFrom(data, extensionRegistry);
44648     }
parseFrom(byte[] data)44649     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(byte[] data)
44650         throws com.google.protobuf.InvalidProtocolBufferException {
44651       return PARSER.parseFrom(data);
44652     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44653     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(
44654         byte[] data,
44655         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44656         throws com.google.protobuf.InvalidProtocolBufferException {
44657       return PARSER.parseFrom(data, extensionRegistry);
44658     }
parseFrom(java.io.InputStream input)44659     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(java.io.InputStream input)
44660         throws java.io.IOException {
44661       return PARSER.parseFrom(input);
44662     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44663     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(
44664         java.io.InputStream input,
44665         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44666         throws java.io.IOException {
44667       return PARSER.parseFrom(input, extensionRegistry);
44668     }
parseDelimitedFrom(java.io.InputStream input)44669     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseDelimitedFrom(java.io.InputStream input)
44670         throws java.io.IOException {
44671       return PARSER.parseDelimitedFrom(input);
44672     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44673     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseDelimitedFrom(
44674         java.io.InputStream input,
44675         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44676         throws java.io.IOException {
44677       return PARSER.parseDelimitedFrom(input, extensionRegistry);
44678     }
parseFrom( com.google.protobuf.CodedInputStream input)44679     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(
44680         com.google.protobuf.CodedInputStream input)
44681         throws java.io.IOException {
44682       return PARSER.parseFrom(input);
44683     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44684     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parseFrom(
44685         com.google.protobuf.CodedInputStream input,
44686         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44687         throws java.io.IOException {
44688       return PARSER.parseFrom(input, extensionRegistry);
44689     }
44690 
newBuilder()44691     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()44692     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest prototype)44693     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest prototype) {
44694       return newBuilder().mergeFrom(prototype);
44695     }
toBuilder()44696     public Builder toBuilder() { return newBuilder(this); }
44697 
    // Creates a builder attached to a parent, for use as a nested builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
44704     /**
44705      * Protobuf type {@code ExecProcedureRequest}
44706      */
44707     public static final class Builder extends
44708         com.google.protobuf.GeneratedMessage.Builder<Builder>
44709        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequestOrBuilder {
44710       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()44711           getDescriptor() {
44712         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_descriptor;
44713       }
44714 
44715       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()44716           internalGetFieldAccessorTable() {
44717         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_fieldAccessorTable
44718             .ensureFieldAccessorsInitialized(
44719                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.Builder.class);
44720       }
44721 
44722       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.newBuilder()
Builder()44723       private Builder() {
44724         maybeForceBuilderInitialization();
44725       }
44726 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)44727       private Builder(
44728           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
44729         super(parent);
44730         maybeForceBuilderInitialization();
44731       }
maybeForceBuilderInitialization()44732       private void maybeForceBuilderInitialization() {
44733         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
44734           getProcedureFieldBuilder();
44735         }
44736       }
create()44737       private static Builder create() {
44738         return new Builder();
44739       }
44740 
clear()44741       public Builder clear() {
44742         super.clear();
44743         if (procedureBuilder_ == null) {
44744           procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
44745         } else {
44746           procedureBuilder_.clear();
44747         }
44748         bitField0_ = (bitField0_ & ~0x00000001);
44749         return this;
44750       }
44751 
clone()44752       public Builder clone() {
44753         return create().mergeFrom(buildPartial());
44754       }
44755 
44756       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()44757           getDescriptorForType() {
44758         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureRequest_descriptor;
44759       }
44760 
getDefaultInstanceForType()44761       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest getDefaultInstanceForType() {
44762         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
44763       }
44764 
build()44765       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest build() {
44766         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest result = buildPartial();
44767         if (!result.isInitialized()) {
44768           throw newUninitializedMessageException(result);
44769         }
44770         return result;
44771       }
44772 
buildPartial()44773       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest buildPartial() {
44774         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest(this);
44775         int from_bitField0_ = bitField0_;
44776         int to_bitField0_ = 0;
44777         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
44778           to_bitField0_ |= 0x00000001;
44779         }
44780         if (procedureBuilder_ == null) {
44781           result.procedure_ = procedure_;
44782         } else {
44783           result.procedure_ = procedureBuilder_.build();
44784         }
44785         result.bitField0_ = to_bitField0_;
44786         onBuilt();
44787         return result;
44788       }
44789 
mergeFrom(com.google.protobuf.Message other)44790       public Builder mergeFrom(com.google.protobuf.Message other) {
44791         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest) {
44792           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)other);
44793         } else {
44794           super.mergeFrom(other);
44795           return this;
44796         }
44797       }
44798 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest other)44799       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest other) {
44800         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance()) return this;
44801         if (other.hasProcedure()) {
44802           mergeProcedure(other.getProcedure());
44803         }
44804         this.mergeUnknownFields(other.getUnknownFields());
44805         return this;
44806       }
44807 
isInitialized()44808       public final boolean isInitialized() {
44809         if (!hasProcedure()) {
44810 
44811           return false;
44812         }
44813         if (!getProcedure().isInitialized()) {
44814 
44815           return false;
44816         }
44817         return true;
44818       }
44819 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)44820       public Builder mergeFrom(
44821           com.google.protobuf.CodedInputStream input,
44822           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
44823           throws java.io.IOException {
44824         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest parsedMessage = null;
44825         try {
44826           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
44827         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
44828           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest) e.getUnfinishedMessage();
44829           throw e;
44830         } finally {
44831           if (parsedMessage != null) {
44832             mergeFrom(parsedMessage);
44833           }
44834         }
44835         return this;
44836       }
44837       private int bitField0_;
44838 
44839       // required .ProcedureDescription procedure = 1;
44840       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
44841       private com.google.protobuf.SingleFieldBuilder<
44842           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> procedureBuilder_;
44843       /**
44844        * <code>required .ProcedureDescription procedure = 1;</code>
44845        */
hasProcedure()44846       public boolean hasProcedure() {
44847         return ((bitField0_ & 0x00000001) == 0x00000001);
44848       }
44849       /**
44850        * <code>required .ProcedureDescription procedure = 1;</code>
44851        */
getProcedure()44852       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() {
44853         if (procedureBuilder_ == null) {
44854           return procedure_;
44855         } else {
44856           return procedureBuilder_.getMessage();
44857         }
44858       }
44859       /**
44860        * <code>required .ProcedureDescription procedure = 1;</code>
44861        */
setProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value)44862       public Builder setProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) {
44863         if (procedureBuilder_ == null) {
44864           if (value == null) {
44865             throw new NullPointerException();
44866           }
44867           procedure_ = value;
44868           onChanged();
44869         } else {
44870           procedureBuilder_.setMessage(value);
44871         }
44872         bitField0_ |= 0x00000001;
44873         return this;
44874       }
44875       /**
44876        * <code>required .ProcedureDescription procedure = 1;</code>
44877        */
setProcedure( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue)44878       public Builder setProcedure(
44879           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue) {
44880         if (procedureBuilder_ == null) {
44881           procedure_ = builderForValue.build();
44882           onChanged();
44883         } else {
44884           procedureBuilder_.setMessage(builderForValue.build());
44885         }
44886         bitField0_ |= 0x00000001;
44887         return this;
44888       }
44889       /**
44890        * <code>required .ProcedureDescription procedure = 1;</code>
44891        */
mergeProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value)44892       public Builder mergeProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) {
44893         if (procedureBuilder_ == null) {
44894           if (((bitField0_ & 0x00000001) == 0x00000001) &&
44895               procedure_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) {
44896             procedure_ =
44897               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(procedure_).mergeFrom(value).buildPartial();
44898           } else {
44899             procedure_ = value;
44900           }
44901           onChanged();
44902         } else {
44903           procedureBuilder_.mergeFrom(value);
44904         }
44905         bitField0_ |= 0x00000001;
44906         return this;
44907       }
44908       /**
44909        * <code>required .ProcedureDescription procedure = 1;</code>
44910        */
clearProcedure()44911       public Builder clearProcedure() {
44912         if (procedureBuilder_ == null) {
44913           procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
44914           onChanged();
44915         } else {
44916           procedureBuilder_.clear();
44917         }
44918         bitField0_ = (bitField0_ & ~0x00000001);
44919         return this;
44920       }
      /**
       * <code>required .ProcedureDescription procedure = 1;</code>
       */
      // Returns a mutable builder for the nested message; sets the has-bit
      // eagerly because handing out the builder implies the field is set.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder getProcedureBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getProcedureFieldBuilder().getBuilder();
      }
      /**
       * <code>required .ProcedureDescription procedure = 1;</code>
       */
      // Read-only view: prefers the live nested builder when one exists,
      // otherwise falls back to the stored message instance.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() {
        if (procedureBuilder_ != null) {
          return procedureBuilder_.getMessageOrBuilder();
        } else {
          return procedure_;
        }
      }
      /**
       * <code>required .ProcedureDescription procedure = 1;</code>
       */
      // Lazily creates the SingleFieldBuilder on first use, seeding it from
      // the current procedure_ value and then nulling procedure_ so the
      // builder becomes the single source of truth for the field.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>
          getProcedureFieldBuilder() {
        if (procedureBuilder_ == null) {
          procedureBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>(
                  procedure_,
                  getParentForChildren(),
                  isClean());
          procedure_ = null;
        }
        return procedureBuilder_;
      }
44955 
44956       // @@protoc_insertion_point(builder_scope:ExecProcedureRequest)
44957     }
44958 
    // Class initializer: builds the shared immutable default instance
    // (noInit constructor skips field setup, so initFields() runs here).
    static {
      defaultInstance = new ExecProcedureRequest(true);
      defaultInstance.initFields();
    }
44963 
44964     // @@protoc_insertion_point(class_scope:ExecProcedureRequest)
44965   }
44966 
  // Read-only accessor contract for ExecProcedureResponse and its Builder:
  // has*/get* pairs for the two optional fields declared in Master.proto.
  public interface ExecProcedureResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int64 expected_timeout = 1;
    /**
     * <code>optional int64 expected_timeout = 1;</code>
     */
    boolean hasExpectedTimeout();
    /**
     * <code>optional int64 expected_timeout = 1;</code>
     */
    long getExpectedTimeout();

    // optional bytes return_data = 2;
    /**
     * <code>optional bytes return_data = 2;</code>
     */
    boolean hasReturnData();
    /**
     * <code>optional bytes return_data = 2;</code>
     */
    com.google.protobuf.ByteString getReturnData();
  }
  /**
   * Protobuf type {@code ExecProcedureResponse}
   */
  // Immutable generated message with two optional fields:
  // expected_timeout (int64, tag 1) and return_data (bytes, tag 2).
  // Presence is tracked in bitField0_ (bit 0 and bit 1 respectively).
  public static final class ExecProcedureResponse extends
      com.google.protobuf.GeneratedMessage
      implements ExecProcedureResponseOrBuilder {
    // Use ExecProcedureResponse.newBuilder() to construct.
    private ExecProcedureResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the singleton default instance.
    private ExecProcedureResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ExecProcedureResponse defaultInstance;
    public static ExecProcedureResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ExecProcedureResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tagged fields until end of
    // stream (tag 0); unrecognized tags are preserved in unknownFields.
    private ExecProcedureResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              expectedTimeout_ = input.readInt64();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              returnData_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.Builder.class);
    }

    public static com.google.protobuf.Parser<ExecProcedureResponse> PARSER =
        new com.google.protobuf.AbstractParser<ExecProcedureResponse>() {
      public ExecProcedureResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ExecProcedureResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ExecProcedureResponse> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional int64 expected_timeout = 1;
    public static final int EXPECTED_TIMEOUT_FIELD_NUMBER = 1;
    private long expectedTimeout_;
    /**
     * <code>optional int64 expected_timeout = 1;</code>
     */
    public boolean hasExpectedTimeout() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int64 expected_timeout = 1;</code>
     */
    public long getExpectedTimeout() {
      return expectedTimeout_;
    }

    // optional bytes return_data = 2;
    public static final int RETURN_DATA_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString returnData_;
    /**
     * <code>optional bytes return_data = 2;</code>
     */
    public boolean hasReturnData() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes return_data = 2;</code>
     */
    public com.google.protobuf.ByteString getReturnData() {
      return returnData_;
    }

    private void initFields() {
      expectedTimeout_ = 0L;
      returnData_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Both fields are optional, so any instance is structurally initialized;
    // the result is memoized (-1 = not computed, 1 = initialized).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt64(1, expectedTimeout_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, returnData_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, expectedTimeout_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, returnData_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over present fields plus unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) obj;

      boolean result = true;
      result = result && (hasExpectedTimeout() == other.hasExpectedTimeout());
      if (hasExpectedTimeout()) {
        result = result && (getExpectedTimeout()
            == other.getExpectedTimeout());
      }
      result = result && (hasReturnData() == other.hasReturnData());
      if (hasReturnData()) {
        result = result && getReturnData()
            .equals(other.getReturnData());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasExpectedTimeout()) {
        hash = (37 * hash) + EXPECTED_TIMEOUT_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getExpectedTimeout());
      }
      if (hasReturnData()) {
        hash = (37 * hash) + RETURN_DATA_FIELD_NUMBER;
        hash = (53 * hash) + getReturnData().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ExecProcedureResponse}
     */
    // Mutable builder; mirrors the message's fields and has-bits, producing
    // an immutable ExecProcedureResponse via build()/buildPartial().
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        expectedTimeout_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        returnData_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ExecProcedureResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state (fields + has-bits) into a new message without
      // an initialization check.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.expectedTimeout_ = expectedTimeout_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.returnData_ = returnData_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()) return this;
        if (other.hasExpectedTimeout()) {
          setExpectedTimeout(other.getExpectedTimeout());
        }
        if (other.hasReturnData()) {
          setReturnData(other.getReturnData());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from the stream and merges into this builder; on failure the
      // partially-parsed message (if any) is still merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional int64 expected_timeout = 1;
      private long expectedTimeout_ ;
      /**
       * <code>optional int64 expected_timeout = 1;</code>
       */
      public boolean hasExpectedTimeout() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional int64 expected_timeout = 1;</code>
       */
      public long getExpectedTimeout() {
        return expectedTimeout_;
      }
      /**
       * <code>optional int64 expected_timeout = 1;</code>
       */
      public Builder setExpectedTimeout(long value) {
        bitField0_ |= 0x00000001;
        expectedTimeout_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 expected_timeout = 1;</code>
       */
      public Builder clearExpectedTimeout() {
        bitField0_ = (bitField0_ & ~0x00000001);
        expectedTimeout_ = 0L;
        onChanged();
        return this;
      }

      // optional bytes return_data = 2;
      private com.google.protobuf.ByteString returnData_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes return_data = 2;</code>
       */
      public boolean hasReturnData() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes return_data = 2;</code>
       */
      public com.google.protobuf.ByteString getReturnData() {
        return returnData_;
      }
      /**
       * <code>optional bytes return_data = 2;</code>
       */
      public Builder setReturnData(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        returnData_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes return_data = 2;</code>
       */
      public Builder clearReturnData() {
        bitField0_ = (bitField0_ & ~0x00000002);
        returnData_ = getDefaultInstance().getReturnData();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ExecProcedureResponse)
    }

    static {
      defaultInstance = new ExecProcedureResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ExecProcedureResponse)
  }
45493 
  // Read-only accessor contract for IsProcedureDoneRequest and its Builder:
  // presence flag, message getter, and OrBuilder view of the single
  // optional procedure field.
  public interface IsProcedureDoneRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .ProcedureDescription procedure = 1;
    /**
     * <code>optional .ProcedureDescription procedure = 1;</code>
     */
    boolean hasProcedure();
    /**
     * <code>optional .ProcedureDescription procedure = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure();
    /**
     * <code>optional .ProcedureDescription procedure = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder();
  }
45511   /**
45512    * Protobuf type {@code IsProcedureDoneRequest}
45513    */
45514   public static final class IsProcedureDoneRequest extends
45515       com.google.protobuf.GeneratedMessage
45516       implements IsProcedureDoneRequestOrBuilder {
    // Use IsProcedureDoneRequest.newBuilder() to construct.
    // Builder-based constructor; captures the builder's unknown fields.
    private IsProcedureDoneRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used only for the singleton default instance;
    // field initialization is deferred to the static initializer.
    private IsProcedureDoneRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
45523 
    // Shared immutable default instance (assigned in the static initializer).
    private static final IsProcedureDoneRequest defaultInstance;
    public static IsProcedureDoneRequest getDefaultInstance() {
      return defaultInstance;
    }

    public IsProcedureDoneRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
45532 
    // Fields present on the wire but not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tagged fields until end of
    // stream (tag 0). Tag 10 = field 1 (procedure, length-delimited); a
    // repeated occurrence is merged into the previously parsed value via a
    // sub-builder. Unrecognized tags are preserved in unknownFields.
    private IsProcedureDoneRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = procedure_.toBuilder();
              }
              procedure_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(procedure_);
                procedure_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection descriptor for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_descriptor;
    }
45591 
    // Supplies the reflection-based field accessor table used by
    // GeneratedMessage for dynamic field access.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.Builder.class);
    }
45598 
    // Parser singleton; delegates to the private stream-parsing constructor.
    // NOTE(review): public and non-final is how protobuf 2.5 emits this field;
    // do not narrow it — external callers reference PARSER directly.
    public static com.google.protobuf.Parser<IsProcedureDoneRequest> PARSER =
        new com.google.protobuf.AbstractParser<IsProcedureDoneRequest>() {
      public IsProcedureDoneRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsProcedureDoneRequest(input, extensionRegistry);
      }
    };
45608 
    @java.lang.Override
    public com.google.protobuf.Parser<IsProcedureDoneRequest> getParserForType() {
      // Exposes the shared PARSER instance via the Message interface.
      return PARSER;
    }
45613 
    // Presence bits for optional fields; bit 0x1 tracks whether 'procedure' was set.
    private int bitField0_;
    // optional .ProcedureDescription procedure = 1;
    public static final int PROCEDURE_FIELD_NUMBER = 1;
    // Never null after construction: initFields() sets it to the default instance.
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_;
    /**
     * <code>optional .ProcedureDescription procedure = 1;</code>
     */
    public boolean hasProcedure() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .ProcedureDescription procedure = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() {
      return procedure_;
    }
    /**
     * <code>optional .ProcedureDescription procedure = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() {
      return procedure_;
    }
45636 
initFields()45637     private void initFields() {
45638       procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
45639     }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'procedure' is optional, but when present its own required fields
      // must be set for this message to be initialized.
      if (hasProcedure()) {
        if (!getProcedure().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
45654 
writeTo(com.google.protobuf.CodedOutputStream output)45655     public void writeTo(com.google.protobuf.CodedOutputStream output)
45656                         throws java.io.IOException {
45657       getSerializedSize();
45658       if (((bitField0_ & 0x00000001) == 0x00000001)) {
45659         output.writeMessage(1, procedure_);
45660       }
45661       getUnknownFields().writeTo(output);
45662     }
45663 
    // Cached wire size; -1 means not yet computed (messages are immutable,
    // so the size never changes once built).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, procedure_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
45678 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization goes through GeneratedMessage's serialized proxy.
      return super.writeReplace();
    }
45685 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest) obj;

      // Field-wise equality: presence must match, and when present the values
      // must be equal; unknown fields participate in equality too.
      boolean result = true;
      result = result && (hasProcedure() == other.hasProcedure());
      if (hasProcedure()) {
        result = result && getProcedure()
            .equals(other.getProcedure());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
45706 
    // Cached hash; 0 means not yet computed. Mixes the descriptor, each set
    // field (tagged by its field number), and the unknown field set —
    // consistent with equals() above.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcedure()) {
        hash = (37 * hash) + PROCEDURE_FIELD_NUMBER;
        hash = (53 * hash) + getProcedure().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
45723 
parseFrom( com.google.protobuf.ByteString data)45724     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(
45725         com.google.protobuf.ByteString data)
45726         throws com.google.protobuf.InvalidProtocolBufferException {
45727       return PARSER.parseFrom(data);
45728     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)45729     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(
45730         com.google.protobuf.ByteString data,
45731         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
45732         throws com.google.protobuf.InvalidProtocolBufferException {
45733       return PARSER.parseFrom(data, extensionRegistry);
45734     }
parseFrom(byte[] data)45735     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(byte[] data)
45736         throws com.google.protobuf.InvalidProtocolBufferException {
45737       return PARSER.parseFrom(data);
45738     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)45739     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(
45740         byte[] data,
45741         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
45742         throws com.google.protobuf.InvalidProtocolBufferException {
45743       return PARSER.parseFrom(data, extensionRegistry);
45744     }
parseFrom(java.io.InputStream input)45745     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(java.io.InputStream input)
45746         throws java.io.IOException {
45747       return PARSER.parseFrom(input);
45748     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)45749     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(
45750         java.io.InputStream input,
45751         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
45752         throws java.io.IOException {
45753       return PARSER.parseFrom(input, extensionRegistry);
45754     }
parseDelimitedFrom(java.io.InputStream input)45755     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseDelimitedFrom(java.io.InputStream input)
45756         throws java.io.IOException {
45757       return PARSER.parseDelimitedFrom(input);
45758     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)45759     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseDelimitedFrom(
45760         java.io.InputStream input,
45761         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
45762         throws java.io.IOException {
45763       return PARSER.parseDelimitedFrom(input, extensionRegistry);
45764     }
parseFrom( com.google.protobuf.CodedInputStream input)45765     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(
45766         com.google.protobuf.CodedInputStream input)
45767         throws java.io.IOException {
45768       return PARSER.parseFrom(input);
45769     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)45770     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parseFrom(
45771         com.google.protobuf.CodedInputStream input,
45772         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
45773         throws java.io.IOException {
45774       return PARSER.parseFrom(input, extensionRegistry);
45775     }
45776 
newBuilder()45777     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()45778     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest prototype)45779     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest prototype) {
45780       return newBuilder().mergeFrom(prototype);
45781     }
toBuilder()45782     public Builder toBuilder() { return newBuilder(this); }
45783 
45784     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)45785     protected Builder newBuilderForType(
45786         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
45787       Builder builder = new Builder(parent);
45788       return builder;
45789     }
45790     /**
45791      * Protobuf type {@code IsProcedureDoneRequest}
45792      */
45793     public static final class Builder extends
45794         com.google.protobuf.GeneratedMessage.Builder<Builder>
45795        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequestOrBuilder {
45796       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()45797           getDescriptor() {
45798         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_descriptor;
45799       }
45800 
45801       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()45802           internalGetFieldAccessorTable() {
45803         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_fieldAccessorTable
45804             .ensureFieldAccessorsInitialized(
45805                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.Builder.class);
45806       }
45807 
45808       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.newBuilder()
Builder()45809       private Builder() {
45810         maybeForceBuilderInitialization();
45811       }
45812 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)45813       private Builder(
45814           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
45815         super(parent);
45816         maybeForceBuilderInitialization();
45817       }
maybeForceBuilderInitialization()45818       private void maybeForceBuilderInitialization() {
45819         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
45820           getProcedureFieldBuilder();
45821         }
45822       }
create()45823       private static Builder create() {
45824         return new Builder();
45825       }
45826 
clear()45827       public Builder clear() {
45828         super.clear();
45829         if (procedureBuilder_ == null) {
45830           procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
45831         } else {
45832           procedureBuilder_.clear();
45833         }
45834         bitField0_ = (bitField0_ & ~0x00000001);
45835         return this;
45836       }
45837 
clone()45838       public Builder clone() {
45839         return create().mergeFrom(buildPartial());
45840       }
45841 
45842       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()45843           getDescriptorForType() {
45844         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneRequest_descriptor;
45845       }
45846 
getDefaultInstanceForType()45847       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest getDefaultInstanceForType() {
45848         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
45849       }
45850 
build()45851       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest build() {
45852         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest result = buildPartial();
45853         if (!result.isInitialized()) {
45854           throw newUninitializedMessageException(result);
45855         }
45856         return result;
45857       }
45858 
buildPartial()45859       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest buildPartial() {
45860         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest(this);
45861         int from_bitField0_ = bitField0_;
45862         int to_bitField0_ = 0;
45863         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
45864           to_bitField0_ |= 0x00000001;
45865         }
45866         if (procedureBuilder_ == null) {
45867           result.procedure_ = procedure_;
45868         } else {
45869           result.procedure_ = procedureBuilder_.build();
45870         }
45871         result.bitField0_ = to_bitField0_;
45872         onBuilt();
45873         return result;
45874       }
45875 
mergeFrom(com.google.protobuf.Message other)45876       public Builder mergeFrom(com.google.protobuf.Message other) {
45877         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest) {
45878           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)other);
45879         } else {
45880           super.mergeFrom(other);
45881           return this;
45882         }
45883       }
45884 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest other)45885       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest other) {
45886         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance()) return this;
45887         if (other.hasProcedure()) {
45888           mergeProcedure(other.getProcedure());
45889         }
45890         this.mergeUnknownFields(other.getUnknownFields());
45891         return this;
45892       }
45893 
isInitialized()45894       public final boolean isInitialized() {
45895         if (hasProcedure()) {
45896           if (!getProcedure().isInitialized()) {
45897 
45898             return false;
45899           }
45900         }
45901         return true;
45902       }
45903 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)45904       public Builder mergeFrom(
45905           com.google.protobuf.CodedInputStream input,
45906           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
45907           throws java.io.IOException {
45908         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest parsedMessage = null;
45909         try {
45910           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
45911         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
45912           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest) e.getUnfinishedMessage();
45913           throw e;
45914         } finally {
45915           if (parsedMessage != null) {
45916             mergeFrom(parsedMessage);
45917           }
45918         }
45919         return this;
45920       }
45921       private int bitField0_;
45922 
45923       // optional .ProcedureDescription procedure = 1;
45924       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
45925       private com.google.protobuf.SingleFieldBuilder<
45926           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> procedureBuilder_;
45927       /**
45928        * <code>optional .ProcedureDescription procedure = 1;</code>
45929        */
hasProcedure()45930       public boolean hasProcedure() {
45931         return ((bitField0_ & 0x00000001) == 0x00000001);
45932       }
45933       /**
45934        * <code>optional .ProcedureDescription procedure = 1;</code>
45935        */
getProcedure()45936       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getProcedure() {
45937         if (procedureBuilder_ == null) {
45938           return procedure_;
45939         } else {
45940           return procedureBuilder_.getMessage();
45941         }
45942       }
45943       /**
45944        * <code>optional .ProcedureDescription procedure = 1;</code>
45945        */
setProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value)45946       public Builder setProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) {
45947         if (procedureBuilder_ == null) {
45948           if (value == null) {
45949             throw new NullPointerException();
45950           }
45951           procedure_ = value;
45952           onChanged();
45953         } else {
45954           procedureBuilder_.setMessage(value);
45955         }
45956         bitField0_ |= 0x00000001;
45957         return this;
45958       }
45959       /**
45960        * <code>optional .ProcedureDescription procedure = 1;</code>
45961        */
setProcedure( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue)45962       public Builder setProcedure(
45963           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue) {
45964         if (procedureBuilder_ == null) {
45965           procedure_ = builderForValue.build();
45966           onChanged();
45967         } else {
45968           procedureBuilder_.setMessage(builderForValue.build());
45969         }
45970         bitField0_ |= 0x00000001;
45971         return this;
45972       }
45973       /**
45974        * <code>optional .ProcedureDescription procedure = 1;</code>
45975        */
mergeProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value)45976       public Builder mergeProcedure(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) {
45977         if (procedureBuilder_ == null) {
45978           if (((bitField0_ & 0x00000001) == 0x00000001) &&
45979               procedure_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) {
45980             procedure_ =
45981               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(procedure_).mergeFrom(value).buildPartial();
45982           } else {
45983             procedure_ = value;
45984           }
45985           onChanged();
45986         } else {
45987           procedureBuilder_.mergeFrom(value);
45988         }
45989         bitField0_ |= 0x00000001;
45990         return this;
45991       }
45992       /**
45993        * <code>optional .ProcedureDescription procedure = 1;</code>
45994        */
clearProcedure()45995       public Builder clearProcedure() {
45996         if (procedureBuilder_ == null) {
45997           procedure_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
45998           onChanged();
45999         } else {
46000           procedureBuilder_.clear();
46001         }
46002         bitField0_ = (bitField0_ & ~0x00000001);
46003         return this;
46004       }
46005       /**
46006        * <code>optional .ProcedureDescription procedure = 1;</code>
46007        */
getProcedureBuilder()46008       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder getProcedureBuilder() {
46009         bitField0_ |= 0x00000001;
46010         onChanged();
46011         return getProcedureFieldBuilder().getBuilder();
46012       }
46013       /**
46014        * <code>optional .ProcedureDescription procedure = 1;</code>
46015        */
getProcedureOrBuilder()46016       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getProcedureOrBuilder() {
46017         if (procedureBuilder_ != null) {
46018           return procedureBuilder_.getMessageOrBuilder();
46019         } else {
46020           return procedure_;
46021         }
46022       }
46023       /**
46024        * <code>optional .ProcedureDescription procedure = 1;</code>
46025        */
46026       private com.google.protobuf.SingleFieldBuilder<
46027           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>
getProcedureFieldBuilder()46028           getProcedureFieldBuilder() {
46029         if (procedureBuilder_ == null) {
46030           procedureBuilder_ = new com.google.protobuf.SingleFieldBuilder<
46031               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>(
46032                   procedure_,
46033                   getParentForChildren(),
46034                   isClean());
46035           procedure_ = null;
46036         }
46037         return procedureBuilder_;
46038       }
46039 
46040       // @@protoc_insertion_point(builder_scope:IsProcedureDoneRequest)
46041     }
46042 
    static {
      // Build the shared default instance via the no-init constructor, then
      // populate proto-default field values.
      defaultInstance = new IsProcedureDoneRequest(true);
      defaultInstance.initFields();
    }
46047 
46048     // @@protoc_insertion_point(class_scope:IsProcedureDoneRequest)
46049   }
46050 
  // Read-only accessor interface implemented by both IsProcedureDoneResponse
  // and its Builder.
  public interface IsProcedureDoneResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool done = 1 [default = false];
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    boolean hasDone();
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    boolean getDone();

    // optional .ProcedureDescription snapshot = 2;
    /**
     * <code>optional .ProcedureDescription snapshot = 2;</code>
     */
    boolean hasSnapshot();
    /**
     * <code>optional .ProcedureDescription snapshot = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot();
    /**
     * <code>optional .ProcedureDescription snapshot = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder();
  }
46078   /**
46079    * Protobuf type {@code IsProcedureDoneResponse}
46080    */
46081   public static final class IsProcedureDoneResponse extends
46082       com.google.protobuf.GeneratedMessage
46083       implements IsProcedureDoneResponseOrBuilder {
    // Use IsProcedureDoneResponse.newBuilder() to construct.
    private IsProcedureDoneResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only for the shared default instance.
    private IsProcedureDoneResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
46090 
    // Shared immutable default instance, created in the static initializer.
    private static final IsProcedureDoneResponse defaultInstance;
    public static IsProcedureDoneResponse getDefaultInstance() {
      return defaultInstance;
    }

    public IsProcedureDoneResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
46099 
    // Fields present on the wire that this message version does not define;
    // preserved so they round-trip on reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
IsProcedureDoneResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46106     private IsProcedureDoneResponse(
46107         com.google.protobuf.CodedInputStream input,
46108         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46109         throws com.google.protobuf.InvalidProtocolBufferException {
46110       initFields();
46111       int mutable_bitField0_ = 0;
46112       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
46113           com.google.protobuf.UnknownFieldSet.newBuilder();
46114       try {
46115         boolean done = false;
46116         while (!done) {
46117           int tag = input.readTag();
46118           switch (tag) {
46119             case 0:
46120               done = true;
46121               break;
46122             default: {
46123               if (!parseUnknownField(input, unknownFields,
46124                                      extensionRegistry, tag)) {
46125                 done = true;
46126               }
46127               break;
46128             }
46129             case 8: {
46130               bitField0_ |= 0x00000001;
46131               done_ = input.readBool();
46132               break;
46133             }
46134             case 18: {
46135               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder subBuilder = null;
46136               if (((bitField0_ & 0x00000002) == 0x00000002)) {
46137                 subBuilder = snapshot_.toBuilder();
46138               }
46139               snapshot_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.PARSER, extensionRegistry);
46140               if (subBuilder != null) {
46141                 subBuilder.mergeFrom(snapshot_);
46142                 snapshot_ = subBuilder.buildPartial();
46143               }
46144               bitField0_ |= 0x00000002;
46145               break;
46146             }
46147           }
46148         }
46149       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
46150         throw e.setUnfinishedMessage(this);
46151       } catch (java.io.IOException e) {
46152         throw new com.google.protobuf.InvalidProtocolBufferException(
46153             e.getMessage()).setUnfinishedMessage(this);
46154       } finally {
46155         this.unknownFields = unknownFields.build();
46156         makeExtensionsImmutable();
46157       }
46158     }
    // Descriptor for the IsProcedureDoneResponse message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_descriptor;
    }
46163 
    // Reflection accessor table for dynamic field access via GeneratedMessage.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.Builder.class);
    }
46170 
    // Parser singleton; delegates to the stream-parsing constructor above.
    // Public and non-final by protobuf 2.5 generator design.
    public static com.google.protobuf.Parser<IsProcedureDoneResponse> PARSER =
        new com.google.protobuf.AbstractParser<IsProcedureDoneResponse>() {
      public IsProcedureDoneResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new IsProcedureDoneResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<IsProcedureDoneResponse> getParserForType() {
      return PARSER;
    }
46185 
    // Presence bits: 0x1 = done, 0x2 = snapshot.
    private int bitField0_;
    // optional bool done = 1 [default = false];
    public static final int DONE_FIELD_NUMBER = 1;
    private boolean done_;
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    public boolean hasDone() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool done = 1 [default = false];</code>
     */
    public boolean getDone() {
      return done_;
    }

    // optional .ProcedureDescription snapshot = 2;
    public static final int SNAPSHOT_FIELD_NUMBER = 2;
    // Never null after construction: initFields() sets it to the default instance.
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_;
    /**
     * <code>optional .ProcedureDescription snapshot = 2;</code>
     */
    public boolean hasSnapshot() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .ProcedureDescription snapshot = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot() {
      return snapshot_;
    }
    /**
     * <code>optional .ProcedureDescription snapshot = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder() {
      return snapshot_;
    }
46224 
initFields()46225     private void initFields() {
46226       done_ = false;
46227       snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
46228     }
    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A message is initialized when every set sub-message is itself initialized;
    // both fields here are optional, so only a present snapshot is checked.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (hasSnapshot()) {
        if (!getSnapshot().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Writes only the fields whose presence bits are set, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure sub-message sizes are memoized before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, done_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, snapshot_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, done_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, snapshot_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
46274 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field equality: presence flags must match, and set fields must
    // compare equal; unknown fields are compared as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) obj;

      boolean result = true;
      result = result && (hasDone() == other.hasDone());
      if (hasDone()) {
        result = result && (getDone()
            == other.getDone());
      }
      result = result && (hasSnapshot() == other.hasSnapshot());
      if (hasSnapshot()) {
        result = result && getSnapshot()
            .equals(other.getSnapshot());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash (0 = not yet computed); consistent with equals() above,
    // mixing only the fields that are present.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasDone()) {
        hash = (37 * hash) + DONE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getDone());
      }
      if (hasSnapshot()) {
        hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
        hash = (53 * hash) + getSnapshot().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
46328 
parseFrom( com.google.protobuf.ByteString data)46329     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(
46330         com.google.protobuf.ByteString data)
46331         throws com.google.protobuf.InvalidProtocolBufferException {
46332       return PARSER.parseFrom(data);
46333     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46334     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(
46335         com.google.protobuf.ByteString data,
46336         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46337         throws com.google.protobuf.InvalidProtocolBufferException {
46338       return PARSER.parseFrom(data, extensionRegistry);
46339     }
parseFrom(byte[] data)46340     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(byte[] data)
46341         throws com.google.protobuf.InvalidProtocolBufferException {
46342       return PARSER.parseFrom(data);
46343     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46344     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(
46345         byte[] data,
46346         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46347         throws com.google.protobuf.InvalidProtocolBufferException {
46348       return PARSER.parseFrom(data, extensionRegistry);
46349     }
parseFrom(java.io.InputStream input)46350     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(java.io.InputStream input)
46351         throws java.io.IOException {
46352       return PARSER.parseFrom(input);
46353     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46354     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(
46355         java.io.InputStream input,
46356         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46357         throws java.io.IOException {
46358       return PARSER.parseFrom(input, extensionRegistry);
46359     }
parseDelimitedFrom(java.io.InputStream input)46360     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseDelimitedFrom(java.io.InputStream input)
46361         throws java.io.IOException {
46362       return PARSER.parseDelimitedFrom(input);
46363     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46364     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseDelimitedFrom(
46365         java.io.InputStream input,
46366         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46367         throws java.io.IOException {
46368       return PARSER.parseDelimitedFrom(input, extensionRegistry);
46369     }
parseFrom( com.google.protobuf.CodedInputStream input)46370     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(
46371         com.google.protobuf.CodedInputStream input)
46372         throws java.io.IOException {
46373       return PARSER.parseFrom(input);
46374     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46375     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parseFrom(
46376         com.google.protobuf.CodedInputStream input,
46377         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46378         throws java.io.IOException {
46379       return PARSER.parseFrom(input, extensionRegistry);
46380     }
46381 
newBuilder()46382     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()46383     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse prototype)46384     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse prototype) {
46385       return newBuilder().mergeFrom(prototype);
46386     }
toBuilder()46387     public Builder toBuilder() { return newBuilder(this); }
46388 
46389     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)46390     protected Builder newBuilderForType(
46391         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
46392       Builder builder = new Builder(parent);
46393       return builder;
46394     }
46395     /**
46396      * Protobuf type {@code IsProcedureDoneResponse}
46397      */
46398     public static final class Builder extends
46399         com.google.protobuf.GeneratedMessage.Builder<Builder>
46400        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponseOrBuilder {
46401       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()46402           getDescriptor() {
46403         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_descriptor;
46404       }
46405 
46406       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()46407           internalGetFieldAccessorTable() {
46408         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_fieldAccessorTable
46409             .ensureFieldAccessorsInitialized(
46410                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.Builder.class);
46411       }
46412 
46413       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.newBuilder()
Builder()46414       private Builder() {
46415         maybeForceBuilderInitialization();
46416       }
46417 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)46418       private Builder(
46419           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
46420         super(parent);
46421         maybeForceBuilderInitialization();
46422       }
maybeForceBuilderInitialization()46423       private void maybeForceBuilderInitialization() {
46424         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
46425           getSnapshotFieldBuilder();
46426         }
46427       }
create()46428       private static Builder create() {
46429         return new Builder();
46430       }
46431 
clear()46432       public Builder clear() {
46433         super.clear();
46434         done_ = false;
46435         bitField0_ = (bitField0_ & ~0x00000001);
46436         if (snapshotBuilder_ == null) {
46437           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
46438         } else {
46439           snapshotBuilder_.clear();
46440         }
46441         bitField0_ = (bitField0_ & ~0x00000002);
46442         return this;
46443       }
46444 
clone()46445       public Builder clone() {
46446         return create().mergeFrom(buildPartial());
46447       }
46448 
46449       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()46450           getDescriptorForType() {
46451         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsProcedureDoneResponse_descriptor;
46452       }
46453 
getDefaultInstanceForType()46454       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse getDefaultInstanceForType() {
46455         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
46456       }
46457 
build()46458       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse build() {
46459         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse result = buildPartial();
46460         if (!result.isInitialized()) {
46461           throw newUninitializedMessageException(result);
46462         }
46463         return result;
46464       }
46465 
buildPartial()46466       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse buildPartial() {
46467         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse(this);
46468         int from_bitField0_ = bitField0_;
46469         int to_bitField0_ = 0;
46470         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
46471           to_bitField0_ |= 0x00000001;
46472         }
46473         result.done_ = done_;
46474         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
46475           to_bitField0_ |= 0x00000002;
46476         }
46477         if (snapshotBuilder_ == null) {
46478           result.snapshot_ = snapshot_;
46479         } else {
46480           result.snapshot_ = snapshotBuilder_.build();
46481         }
46482         result.bitField0_ = to_bitField0_;
46483         onBuilt();
46484         return result;
46485       }
46486 
mergeFrom(com.google.protobuf.Message other)46487       public Builder mergeFrom(com.google.protobuf.Message other) {
46488         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) {
46489           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse)other);
46490         } else {
46491           super.mergeFrom(other);
46492           return this;
46493         }
46494       }
46495 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse other)46496       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse other) {
46497         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance()) return this;
46498         if (other.hasDone()) {
46499           setDone(other.getDone());
46500         }
46501         if (other.hasSnapshot()) {
46502           mergeSnapshot(other.getSnapshot());
46503         }
46504         this.mergeUnknownFields(other.getUnknownFields());
46505         return this;
46506       }
46507 
isInitialized()46508       public final boolean isInitialized() {
46509         if (hasSnapshot()) {
46510           if (!getSnapshot().isInitialized()) {
46511 
46512             return false;
46513           }
46514         }
46515         return true;
46516       }
46517 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46518       public Builder mergeFrom(
46519           com.google.protobuf.CodedInputStream input,
46520           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46521           throws java.io.IOException {
46522         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse parsedMessage = null;
46523         try {
46524           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
46525         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
46526           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) e.getUnfinishedMessage();
46527           throw e;
46528         } finally {
46529           if (parsedMessage != null) {
46530             mergeFrom(parsedMessage);
46531           }
46532         }
46533         return this;
46534       }
46535       private int bitField0_;
46536 
46537       // optional bool done = 1 [default = false];
46538       private boolean done_ ;
46539       /**
46540        * <code>optional bool done = 1 [default = false];</code>
46541        */
hasDone()46542       public boolean hasDone() {
46543         return ((bitField0_ & 0x00000001) == 0x00000001);
46544       }
46545       /**
46546        * <code>optional bool done = 1 [default = false];</code>
46547        */
getDone()46548       public boolean getDone() {
46549         return done_;
46550       }
46551       /**
46552        * <code>optional bool done = 1 [default = false];</code>
46553        */
setDone(boolean value)46554       public Builder setDone(boolean value) {
46555         bitField0_ |= 0x00000001;
46556         done_ = value;
46557         onChanged();
46558         return this;
46559       }
46560       /**
46561        * <code>optional bool done = 1 [default = false];</code>
46562        */
clearDone()46563       public Builder clearDone() {
46564         bitField0_ = (bitField0_ & ~0x00000001);
46565         done_ = false;
46566         onChanged();
46567         return this;
46568       }
46569 
46570       // optional .ProcedureDescription snapshot = 2;
46571       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
46572       private com.google.protobuf.SingleFieldBuilder<
46573           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder> snapshotBuilder_;
46574       /**
46575        * <code>optional .ProcedureDescription snapshot = 2;</code>
46576        */
hasSnapshot()46577       public boolean hasSnapshot() {
46578         return ((bitField0_ & 0x00000002) == 0x00000002);
46579       }
46580       /**
46581        * <code>optional .ProcedureDescription snapshot = 2;</code>
46582        */
getSnapshot()46583       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription getSnapshot() {
46584         if (snapshotBuilder_ == null) {
46585           return snapshot_;
46586         } else {
46587           return snapshotBuilder_.getMessage();
46588         }
46589       }
46590       /**
46591        * <code>optional .ProcedureDescription snapshot = 2;</code>
46592        */
setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value)46593       public Builder setSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) {
46594         if (snapshotBuilder_ == null) {
46595           if (value == null) {
46596             throw new NullPointerException();
46597           }
46598           snapshot_ = value;
46599           onChanged();
46600         } else {
46601           snapshotBuilder_.setMessage(value);
46602         }
46603         bitField0_ |= 0x00000002;
46604         return this;
46605       }
46606       /**
46607        * <code>optional .ProcedureDescription snapshot = 2;</code>
46608        */
setSnapshot( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue)46609       public Builder setSnapshot(
46610           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder builderForValue) {
46611         if (snapshotBuilder_ == null) {
46612           snapshot_ = builderForValue.build();
46613           onChanged();
46614         } else {
46615           snapshotBuilder_.setMessage(builderForValue.build());
46616         }
46617         bitField0_ |= 0x00000002;
46618         return this;
46619       }
46620       /**
46621        * <code>optional .ProcedureDescription snapshot = 2;</code>
46622        */
mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value)46623       public Builder mergeSnapshot(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription value) {
46624         if (snapshotBuilder_ == null) {
46625           if (((bitField0_ & 0x00000002) == 0x00000002) &&
46626               snapshot_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance()) {
46627             snapshot_ =
46628               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.newBuilder(snapshot_).mergeFrom(value).buildPartial();
46629           } else {
46630             snapshot_ = value;
46631           }
46632           onChanged();
46633         } else {
46634           snapshotBuilder_.mergeFrom(value);
46635         }
46636         bitField0_ |= 0x00000002;
46637         return this;
46638       }
46639       /**
46640        * <code>optional .ProcedureDescription snapshot = 2;</code>
46641        */
clearSnapshot()46642       public Builder clearSnapshot() {
46643         if (snapshotBuilder_ == null) {
46644           snapshot_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.getDefaultInstance();
46645           onChanged();
46646         } else {
46647           snapshotBuilder_.clear();
46648         }
46649         bitField0_ = (bitField0_ & ~0x00000002);
46650         return this;
46651       }
46652       /**
46653        * <code>optional .ProcedureDescription snapshot = 2;</code>
46654        */
getSnapshotBuilder()46655       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder getSnapshotBuilder() {
46656         bitField0_ |= 0x00000002;
46657         onChanged();
46658         return getSnapshotFieldBuilder().getBuilder();
46659       }
46660       /**
46661        * <code>optional .ProcedureDescription snapshot = 2;</code>
46662        */
getSnapshotOrBuilder()46663       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder getSnapshotOrBuilder() {
46664         if (snapshotBuilder_ != null) {
46665           return snapshotBuilder_.getMessageOrBuilder();
46666         } else {
46667           return snapshot_;
46668         }
46669       }
46670       /**
46671        * <code>optional .ProcedureDescription snapshot = 2;</code>
46672        */
46673       private com.google.protobuf.SingleFieldBuilder<
46674           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>
getSnapshotFieldBuilder()46675           getSnapshotFieldBuilder() {
46676         if (snapshotBuilder_ == null) {
46677           snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilder<
46678               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescriptionOrBuilder>(
46679                   snapshot_,
46680                   getParentForChildren(),
46681                   isClean());
46682           snapshot_ = null;
46683         }
46684         return snapshotBuilder_;
46685       }
46686 
46687       // @@protoc_insertion_point(builder_scope:IsProcedureDoneResponse)
46688     }
46689 
    // Eagerly create the singleton default instance returned by getDefaultInstance().
    static {
      defaultInstance = new IsProcedureDoneResponse(true);
      defaultInstance.initFields();
    }
46694 
46695     // @@protoc_insertion_point(class_scope:IsProcedureDoneResponse)
46696   }
46697 
  /**
   * Read-only accessor interface implemented by both GetProcedureResultRequest
   * and its Builder.
   */
  public interface GetProcedureResultRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 proc_id = 1;
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    boolean hasProcId();
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    long getProcId();
  }
46711   /**
46712    * Protobuf type {@code GetProcedureResultRequest}
46713    */
46714   public static final class GetProcedureResultRequest extends
46715       com.google.protobuf.GeneratedMessage
46716       implements GetProcedureResultRequestOrBuilder {
    // Use GetProcedureResultRequest.newBuilder() to construct.
    private GetProcedureResultRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance.
    private GetProcedureResultRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetProcedureResultRequest defaultInstance;
    public static GetProcedureResultRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetProcedureResultRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time are preserved here for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
GetProcedureResultRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46739     private GetProcedureResultRequest(
46740         com.google.protobuf.CodedInputStream input,
46741         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46742         throws com.google.protobuf.InvalidProtocolBufferException {
46743       initFields();
46744       int mutable_bitField0_ = 0;
46745       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
46746           com.google.protobuf.UnknownFieldSet.newBuilder();
46747       try {
46748         boolean done = false;
46749         while (!done) {
46750           int tag = input.readTag();
46751           switch (tag) {
46752             case 0:
46753               done = true;
46754               break;
46755             default: {
46756               if (!parseUnknownField(input, unknownFields,
46757                                      extensionRegistry, tag)) {
46758                 done = true;
46759               }
46760               break;
46761             }
46762             case 8: {
46763               bitField0_ |= 0x00000001;
46764               procId_ = input.readUInt64();
46765               break;
46766             }
46767           }
46768         }
46769       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
46770         throw e.setUnfinishedMessage(this);
46771       } catch (java.io.IOException e) {
46772         throw new com.google.protobuf.InvalidProtocolBufferException(
46773             e.getMessage()).setUnfinishedMessage(this);
46774       } finally {
46775         this.unknownFields = unknownFields.build();
46776         makeExtensionsImmutable();
46777       }
46778     }
    // Descriptor and reflection plumbing generated from Master.proto.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.Builder.class);
    }

    // Shared parser instance; delegates to the stream-based private constructor.
    public static com.google.protobuf.Parser<GetProcedureResultRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetProcedureResultRequest>() {
      public GetProcedureResultRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetProcedureResultRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetProcedureResultRequest> getParserForType() {
      return PARSER;
    }
46805 
    // Presence bits (bit 0 = proc_id).
    private int bitField0_;
    // required uint64 proc_id = 1;
    public static final int PROC_ID_FIELD_NUMBER = 1;
    private long procId_;
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    public boolean hasProcId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    public long getProcId() {
      return procId_;
    }

    // Resets proc_id to its default; called by constructors before parsing.
    private void initFields() {
      procId_ = 0L;
    }
    // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // proc_id is a required field, so it must be present for the message to be valid.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasProcId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Writes only the fields whose presence bits are set, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt64(1, procId_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(1, procId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
46862 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field equality: presence flags must match, and set fields must
    // compare equal; unknown fields are compared as well.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest) obj;

      boolean result = true;
      result = result && (hasProcId() == other.hasProcId());
      if (hasProcId()) {
        result = result && (getProcId()
            == other.getProcId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash (0 = not yet computed); consistent with equals() above.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasProcId()) {
        hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getProcId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
46907 
parseFrom( com.google.protobuf.ByteString data)46908     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(
46909         com.google.protobuf.ByteString data)
46910         throws com.google.protobuf.InvalidProtocolBufferException {
46911       return PARSER.parseFrom(data);
46912     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46913     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(
46914         com.google.protobuf.ByteString data,
46915         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46916         throws com.google.protobuf.InvalidProtocolBufferException {
46917       return PARSER.parseFrom(data, extensionRegistry);
46918     }
parseFrom(byte[] data)46919     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(byte[] data)
46920         throws com.google.protobuf.InvalidProtocolBufferException {
46921       return PARSER.parseFrom(data);
46922     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46923     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(
46924         byte[] data,
46925         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46926         throws com.google.protobuf.InvalidProtocolBufferException {
46927       return PARSER.parseFrom(data, extensionRegistry);
46928     }
parseFrom(java.io.InputStream input)46929     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(java.io.InputStream input)
46930         throws java.io.IOException {
46931       return PARSER.parseFrom(input);
46932     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46933     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(
46934         java.io.InputStream input,
46935         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46936         throws java.io.IOException {
46937       return PARSER.parseFrom(input, extensionRegistry);
46938     }
parseDelimitedFrom(java.io.InputStream input)46939     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseDelimitedFrom(java.io.InputStream input)
46940         throws java.io.IOException {
46941       return PARSER.parseDelimitedFrom(input);
46942     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46943     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseDelimitedFrom(
46944         java.io.InputStream input,
46945         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46946         throws java.io.IOException {
46947       return PARSER.parseDelimitedFrom(input, extensionRegistry);
46948     }
parseFrom( com.google.protobuf.CodedInputStream input)46949     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(
46950         com.google.protobuf.CodedInputStream input)
46951         throws java.io.IOException {
46952       return PARSER.parseFrom(input);
46953     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)46954     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parseFrom(
46955         com.google.protobuf.CodedInputStream input,
46956         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
46957         throws java.io.IOException {
46958       return PARSER.parseFrom(input, extensionRegistry);
46959     }
46960 
newBuilder()46961     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()46962     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest prototype)46963     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest prototype) {
46964       return newBuilder().mergeFrom(prototype);
46965     }
toBuilder()46966     public Builder toBuilder() { return newBuilder(this); }
46967 
46968     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)46969     protected Builder newBuilderForType(
46970         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
46971       Builder builder = new Builder(parent);
46972       return builder;
46973     }
46974     /**
46975      * Protobuf type {@code GetProcedureResultRequest}
46976      */
46977     public static final class Builder extends
46978         com.google.protobuf.GeneratedMessage.Builder<Builder>
46979        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequestOrBuilder {
46980       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()46981           getDescriptor() {
46982         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultRequest_descriptor;
46983       }
46984 
46985       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()46986           internalGetFieldAccessorTable() {
46987         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultRequest_fieldAccessorTable
46988             .ensureFieldAccessorsInitialized(
46989                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.Builder.class);
46990       }
46991 
46992       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.newBuilder()
Builder()46993       private Builder() {
46994         maybeForceBuilderInitialization();
46995       }
46996 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)46997       private Builder(
46998           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
46999         super(parent);
47000         maybeForceBuilderInitialization();
47001       }
maybeForceBuilderInitialization()47002       private void maybeForceBuilderInitialization() {
47003         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
47004         }
47005       }
create()47006       private static Builder create() {
47007         return new Builder();
47008       }
47009 
clear()47010       public Builder clear() {
47011         super.clear();
47012         procId_ = 0L;
47013         bitField0_ = (bitField0_ & ~0x00000001);
47014         return this;
47015       }
47016 
clone()47017       public Builder clone() {
47018         return create().mergeFrom(buildPartial());
47019       }
47020 
47021       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()47022           getDescriptorForType() {
47023         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultRequest_descriptor;
47024       }
47025 
getDefaultInstanceForType()47026       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest getDefaultInstanceForType() {
47027         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
47028       }
47029 
build()47030       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest build() {
47031         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest result = buildPartial();
47032         if (!result.isInitialized()) {
47033           throw newUninitializedMessageException(result);
47034         }
47035         return result;
47036       }
47037 
buildPartial()47038       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest buildPartial() {
47039         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest(this);
47040         int from_bitField0_ = bitField0_;
47041         int to_bitField0_ = 0;
47042         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
47043           to_bitField0_ |= 0x00000001;
47044         }
47045         result.procId_ = procId_;
47046         result.bitField0_ = to_bitField0_;
47047         onBuilt();
47048         return result;
47049       }
47050 
mergeFrom(com.google.protobuf.Message other)47051       public Builder mergeFrom(com.google.protobuf.Message other) {
47052         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest) {
47053           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)other);
47054         } else {
47055           super.mergeFrom(other);
47056           return this;
47057         }
47058       }
47059 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest other)47060       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest other) {
47061         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance()) return this;
47062         if (other.hasProcId()) {
47063           setProcId(other.getProcId());
47064         }
47065         this.mergeUnknownFields(other.getUnknownFields());
47066         return this;
47067       }
47068 
isInitialized()47069       public final boolean isInitialized() {
47070         if (!hasProcId()) {
47071 
47072           return false;
47073         }
47074         return true;
47075       }
47076 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)47077       public Builder mergeFrom(
47078           com.google.protobuf.CodedInputStream input,
47079           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
47080           throws java.io.IOException {
47081         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest parsedMessage = null;
47082         try {
47083           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
47084         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
47085           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest) e.getUnfinishedMessage();
47086           throw e;
47087         } finally {
47088           if (parsedMessage != null) {
47089             mergeFrom(parsedMessage);
47090           }
47091         }
47092         return this;
47093       }
47094       private int bitField0_;
47095 
47096       // required uint64 proc_id = 1;
47097       private long procId_ ;
47098       /**
47099        * <code>required uint64 proc_id = 1;</code>
47100        */
hasProcId()47101       public boolean hasProcId() {
47102         return ((bitField0_ & 0x00000001) == 0x00000001);
47103       }
47104       /**
47105        * <code>required uint64 proc_id = 1;</code>
47106        */
getProcId()47107       public long getProcId() {
47108         return procId_;
47109       }
47110       /**
47111        * <code>required uint64 proc_id = 1;</code>
47112        */
setProcId(long value)47113       public Builder setProcId(long value) {
47114         bitField0_ |= 0x00000001;
47115         procId_ = value;
47116         onChanged();
47117         return this;
47118       }
47119       /**
47120        * <code>required uint64 proc_id = 1;</code>
47121        */
clearProcId()47122       public Builder clearProcId() {
47123         bitField0_ = (bitField0_ & ~0x00000001);
47124         procId_ = 0L;
47125         onChanged();
47126         return this;
47127       }
47128 
47129       // @@protoc_insertion_point(builder_scope:GetProcedureResultRequest)
47130     }
47131 
    // Eagerly create the shared default (empty) instance used by
    // getDefaultInstance() and merge short-circuits.
    static {
      defaultInstance = new GetProcedureResultRequest(true);
      defaultInstance.initFields();
    }
47136 
47137     // @@protoc_insertion_point(class_scope:GetProcedureResultRequest)
47138   }
47139 
  /**
   * Read-only accessor interface for {@code GetProcedureResultResponse},
   * implemented by both the immutable message and its builder.
   */
  public interface GetProcedureResultResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .GetProcedureResultResponse.State state = 1;
    /**
     * <code>required .GetProcedureResultResponse.State state = 1;</code>
     */
    boolean hasState();
    /**
     * <code>required .GetProcedureResultResponse.State state = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State getState();

    // optional uint64 start_time = 2;
    /**
     * <code>optional uint64 start_time = 2;</code>
     */
    boolean hasStartTime();
    /**
     * <code>optional uint64 start_time = 2;</code>
     */
    long getStartTime();

    // optional uint64 last_update = 3;
    /**
     * <code>optional uint64 last_update = 3;</code>
     */
    boolean hasLastUpdate();
    /**
     * <code>optional uint64 last_update = 3;</code>
     */
    long getLastUpdate();

    // optional bytes result = 4;
    /**
     * <code>optional bytes result = 4;</code>
     */
    boolean hasResult();
    /**
     * <code>optional bytes result = 4;</code>
     */
    com.google.protobuf.ByteString getResult();

    // optional .ForeignExceptionMessage exception = 5;
    /**
     * <code>optional .ForeignExceptionMessage exception = 5;</code>
     */
    boolean hasException();
    /**
     * <code>optional .ForeignExceptionMessage exception = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException();
    /**
     * <code>optional .ForeignExceptionMessage exception = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder();
  }
47197   /**
47198    * Protobuf type {@code GetProcedureResultResponse}
47199    */
47200   public static final class GetProcedureResultResponse extends
47201       com.google.protobuf.GeneratedMessage
47202       implements GetProcedureResultResponseOrBuilder {
47203     // Use GetProcedureResultResponse.newBuilder() to construct.
GetProcedureResultResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)47204     private GetProcedureResultResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
47205       super(builder);
47206       this.unknownFields = builder.getUnknownFields();
47207     }
GetProcedureResultResponse(boolean noInit)47208     private GetProcedureResultResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
47209 
47210     private static final GetProcedureResultResponse defaultInstance;
getDefaultInstance()47211     public static GetProcedureResultResponse getDefaultInstance() {
47212       return defaultInstance;
47213     }
47214 
getDefaultInstanceForType()47215     public GetProcedureResultResponse getDefaultInstanceForType() {
47216       return defaultInstance;
47217     }
47218 
47219     private final com.google.protobuf.UnknownFieldSet unknownFields;
47220     @java.lang.Override
47221     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()47222         getUnknownFields() {
47223       return this.unknownFields;
47224     }
GetProcedureResultResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)47225     private GetProcedureResultResponse(
47226         com.google.protobuf.CodedInputStream input,
47227         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
47228         throws com.google.protobuf.InvalidProtocolBufferException {
47229       initFields();
47230       int mutable_bitField0_ = 0;
47231       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
47232           com.google.protobuf.UnknownFieldSet.newBuilder();
47233       try {
47234         boolean done = false;
47235         while (!done) {
47236           int tag = input.readTag();
47237           switch (tag) {
47238             case 0:
47239               done = true;
47240               break;
47241             default: {
47242               if (!parseUnknownField(input, unknownFields,
47243                                      extensionRegistry, tag)) {
47244                 done = true;
47245               }
47246               break;
47247             }
47248             case 8: {
47249               int rawValue = input.readEnum();
47250               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.valueOf(rawValue);
47251               if (value == null) {
47252                 unknownFields.mergeVarintField(1, rawValue);
47253               } else {
47254                 bitField0_ |= 0x00000001;
47255                 state_ = value;
47256               }
47257               break;
47258             }
47259             case 16: {
47260               bitField0_ |= 0x00000002;
47261               startTime_ = input.readUInt64();
47262               break;
47263             }
47264             case 24: {
47265               bitField0_ |= 0x00000004;
47266               lastUpdate_ = input.readUInt64();
47267               break;
47268             }
47269             case 34: {
47270               bitField0_ |= 0x00000008;
47271               result_ = input.readBytes();
47272               break;
47273             }
47274             case 42: {
47275               org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder subBuilder = null;
47276               if (((bitField0_ & 0x00000010) == 0x00000010)) {
47277                 subBuilder = exception_.toBuilder();
47278               }
47279               exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.PARSER, extensionRegistry);
47280               if (subBuilder != null) {
47281                 subBuilder.mergeFrom(exception_);
47282                 exception_ = subBuilder.buildPartial();
47283               }
47284               bitField0_ |= 0x00000010;
47285               break;
47286             }
47287           }
47288         }
47289       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
47290         throw e.setUnfinishedMessage(this);
47291       } catch (java.io.IOException e) {
47292         throw new com.google.protobuf.InvalidProtocolBufferException(
47293             e.getMessage()).setUnfinishedMessage(this);
47294       } finally {
47295         this.unknownFields = unknownFields.build();
47296         makeExtensionsImmutable();
47297       }
47298     }
    /** Descriptor for this message type (reflection support). */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultResponse_descriptor;
    }

    /** Maps descriptor fields to the generated accessors for reflection. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.Builder.class);
    }

    // Singleton parser; delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<GetProcedureResultResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetProcedureResultResponse>() {
      public GetProcedureResultResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetProcedureResultResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetProcedureResultResponse> getParserForType() {
      return PARSER;
    }
47325 
47326     /**
47327      * Protobuf enum {@code GetProcedureResultResponse.State}
47328      */
47329     public enum State
47330         implements com.google.protobuf.ProtocolMessageEnum {
47331       /**
47332        * <code>NOT_FOUND = 0;</code>
47333        */
47334       NOT_FOUND(0, 0),
47335       /**
47336        * <code>RUNNING = 1;</code>
47337        */
47338       RUNNING(1, 1),
47339       /**
47340        * <code>FINISHED = 2;</code>
47341        */
47342       FINISHED(2, 2),
47343       ;
47344 
47345       /**
47346        * <code>NOT_FOUND = 0;</code>
47347        */
47348       public static final int NOT_FOUND_VALUE = 0;
47349       /**
47350        * <code>RUNNING = 1;</code>
47351        */
47352       public static final int RUNNING_VALUE = 1;
47353       /**
47354        * <code>FINISHED = 2;</code>
47355        */
47356       public static final int FINISHED_VALUE = 2;
47357 
47358 
getNumber()47359       public final int getNumber() { return value; }
47360 
valueOf(int value)47361       public static State valueOf(int value) {
47362         switch (value) {
47363           case 0: return NOT_FOUND;
47364           case 1: return RUNNING;
47365           case 2: return FINISHED;
47366           default: return null;
47367         }
47368       }
47369 
47370       public static com.google.protobuf.Internal.EnumLiteMap<State>
internalGetValueMap()47371           internalGetValueMap() {
47372         return internalValueMap;
47373       }
47374       private static com.google.protobuf.Internal.EnumLiteMap<State>
47375           internalValueMap =
47376             new com.google.protobuf.Internal.EnumLiteMap<State>() {
47377               public State findValueByNumber(int number) {
47378                 return State.valueOf(number);
47379               }
47380             };
47381 
47382       public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()47383           getValueDescriptor() {
47384         return getDescriptor().getValues().get(index);
47385       }
47386       public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()47387           getDescriptorForType() {
47388         return getDescriptor();
47389       }
47390       public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()47391           getDescriptor() {
47392         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDescriptor().getEnumTypes().get(0);
47393       }
47394 
47395       private static final State[] VALUES = values();
47396 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)47397       public static State valueOf(
47398           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
47399         if (desc.getType() != getDescriptor()) {
47400           throw new java.lang.IllegalArgumentException(
47401             "EnumValueDescriptor is not for this type.");
47402         }
47403         return VALUES[desc.getIndex()];
47404       }
47405 
47406       private final int index;
47407       private final int value;
47408 
State(int index, int value)47409       private State(int index, int value) {
47410         this.index = index;
47411         this.value = value;
47412       }
47413 
47414       // @@protoc_insertion_point(enum_scope:GetProcedureResultResponse.State)
47415     }
47416 
    // Presence bitmask: bit 0 = state, bit 1 = start_time, bit 2 = last_update,
    // bit 3 = result, bit 4 = exception.
    private int bitField0_;
    // required .GetProcedureResultResponse.State state = 1;
    public static final int STATE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State state_;
    /**
     * <code>required .GetProcedureResultResponse.State state = 1;</code>
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .GetProcedureResultResponse.State state = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State getState() {
      return state_;
    }

    // optional uint64 start_time = 2;
    public static final int START_TIME_FIELD_NUMBER = 2;
    private long startTime_;
    /**
     * <code>optional uint64 start_time = 2;</code>
     */
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 start_time = 2;</code>
     */
    public long getStartTime() {
      return startTime_;
    }

    // optional uint64 last_update = 3;
    public static final int LAST_UPDATE_FIELD_NUMBER = 3;
    private long lastUpdate_;
    /**
     * <code>optional uint64 last_update = 3;</code>
     */
    public boolean hasLastUpdate() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional uint64 last_update = 3;</code>
     */
    public long getLastUpdate() {
      return lastUpdate_;
    }

    // optional bytes result = 4;
    public static final int RESULT_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString result_;
    /**
     * <code>optional bytes result = 4;</code>
     */
    public boolean hasResult() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bytes result = 4;</code>
     */
    public com.google.protobuf.ByteString getResult() {
      return result_;
    }

    // optional .ForeignExceptionMessage exception = 5;
    public static final int EXCEPTION_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_;
    /**
     * <code>optional .ForeignExceptionMessage exception = 5;</code>
     */
    public boolean hasException() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional .ForeignExceptionMessage exception = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() {
      return exception_;
    }
    /**
     * <code>optional .ForeignExceptionMessage exception = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() {
      return exception_;
    }
47503 
initFields()47504     private void initFields() {
47505       state_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND;
47506       startTime_ = 0L;
47507       lastUpdate_ = 0L;
47508       result_ = com.google.protobuf.ByteString.EMPTY;
47509       exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance();
47510     }
    // Cached initialization state: -1 = unknown, 0 = missing required, 1 = ok.
    private byte memoizedIsInitialized = -1;
    /**
     * Returns true when the required {@code state} field is set; memoized.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasState()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
47523 
writeTo(com.google.protobuf.CodedOutputStream output)47524     public void writeTo(com.google.protobuf.CodedOutputStream output)
47525                         throws java.io.IOException {
47526       getSerializedSize();
47527       if (((bitField0_ & 0x00000001) == 0x00000001)) {
47528         output.writeEnum(1, state_.getNumber());
47529       }
47530       if (((bitField0_ & 0x00000002) == 0x00000002)) {
47531         output.writeUInt64(2, startTime_);
47532       }
47533       if (((bitField0_ & 0x00000004) == 0x00000004)) {
47534         output.writeUInt64(3, lastUpdate_);
47535       }
47536       if (((bitField0_ & 0x00000008) == 0x00000008)) {
47537         output.writeBytes(4, result_);
47538       }
47539       if (((bitField0_ & 0x00000010) == 0x00000010)) {
47540         output.writeMessage(5, exception_);
47541       }
47542       getUnknownFields().writeTo(output);
47543     }
47544 
    // Cached wire size in bytes; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    /**
     * Returns the serialized size of this message, summing only the fields
     * that are present plus unknown fields; memoized.
     */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, state_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, startTime_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, lastUpdate_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, result_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, exception_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
47575 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Delegate Java serialization to GeneratedMessage's serialization proxy.
      return super.writeReplace();
    }
47582 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      // Two messages are equal when each field has the same presence and,
      // where present, the same value — including unknown fields.
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse) obj;

      boolean result = true;
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result && (hasStartTime() == other.hasStartTime());
      if (hasStartTime()) {
        result = result && (getStartTime()
            == other.getStartTime());
      }
      result = result && (hasLastUpdate() == other.hasLastUpdate());
      if (hasLastUpdate()) {
        result = result && (getLastUpdate()
            == other.getLastUpdate());
      }
      result = result && (hasResult() == other.hasResult());
      if (hasResult()) {
        result = result && getResult()
            .equals(other.getResult());
      }
      result = result && (hasException() == other.hasException());
      if (hasException()) {
        result = result && getException()
            .equals(other.getException());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
47623 
    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0
    // is simply recomputed — harmless since the message is immutable).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mix the descriptor plus each present field, tagged by field number,
      // keeping hashCode consistent with equals().
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getStartTime());
      }
      if (hasLastUpdate()) {
        hash = (37 * hash) + LAST_UPDATE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getLastUpdate());
      }
      if (hasResult()) {
        hash = (37 * hash) + RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getResult().hashCode();
      }
      if (hasException()) {
        hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
        hash = (53 * hash) + getException().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
47656 
    // Static parse entry points; all delegate to PARSER.  The overloads that
    // take an ExtensionRegistryLite can resolve extensions while parsing; the
    // InvalidProtocolBufferException variants reject malformed input.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message,
    // allowing multiple messages on one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
47709 
    // Builder factory methods: newBuilder() for an empty builder,
    // newBuilder(prototype) / toBuilder() for a builder pre-populated with an
    // existing message's fields.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally for nested-builder change notification.
      Builder builder = new Builder(parent);
      return builder;
    }
47723     /**
47724      * Protobuf type {@code GetProcedureResultResponse}
47725      */
47726     public static final class Builder extends
47727         com.google.protobuf.GeneratedMessage.Builder<Builder>
47728        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponseOrBuilder {
47729       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()47730           getDescriptor() {
47731         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultResponse_descriptor;
47732       }
47733 
47734       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()47735           internalGetFieldAccessorTable() {
47736         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultResponse_fieldAccessorTable
47737             .ensureFieldAccessorsInitialized(
47738                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.Builder.class);
47739       }
47740 
47741       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.newBuilder()
Builder()47742       private Builder() {
47743         maybeForceBuilderInitialization();
47744       }
47745 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)47746       private Builder(
47747           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
47748         super(parent);
47749         maybeForceBuilderInitialization();
47750       }
maybeForceBuilderInitialization()47751       private void maybeForceBuilderInitialization() {
47752         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
47753           getExceptionFieldBuilder();
47754         }
47755       }
create()47756       private static Builder create() {
47757         return new Builder();
47758       }
47759 
clear()47760       public Builder clear() {
47761         super.clear();
47762         state_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND;
47763         bitField0_ = (bitField0_ & ~0x00000001);
47764         startTime_ = 0L;
47765         bitField0_ = (bitField0_ & ~0x00000002);
47766         lastUpdate_ = 0L;
47767         bitField0_ = (bitField0_ & ~0x00000004);
47768         result_ = com.google.protobuf.ByteString.EMPTY;
47769         bitField0_ = (bitField0_ & ~0x00000008);
47770         if (exceptionBuilder_ == null) {
47771           exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance();
47772         } else {
47773           exceptionBuilder_.clear();
47774         }
47775         bitField0_ = (bitField0_ & ~0x00000010);
47776         return this;
47777       }
47778 
clone()47779       public Builder clone() {
47780         return create().mergeFrom(buildPartial());
47781       }
47782 
47783       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()47784           getDescriptorForType() {
47785         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_GetProcedureResultResponse_descriptor;
47786       }
47787 
getDefaultInstanceForType()47788       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse getDefaultInstanceForType() {
47789         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
47790       }
47791 
build()47792       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse build() {
47793         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse result = buildPartial();
47794         if (!result.isInitialized()) {
47795           throw newUninitializedMessageException(result);
47796         }
47797         return result;
47798       }
47799 
buildPartial()47800       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse buildPartial() {
47801         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse(this);
47802         int from_bitField0_ = bitField0_;
47803         int to_bitField0_ = 0;
47804         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
47805           to_bitField0_ |= 0x00000001;
47806         }
47807         result.state_ = state_;
47808         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
47809           to_bitField0_ |= 0x00000002;
47810         }
47811         result.startTime_ = startTime_;
47812         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
47813           to_bitField0_ |= 0x00000004;
47814         }
47815         result.lastUpdate_ = lastUpdate_;
47816         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
47817           to_bitField0_ |= 0x00000008;
47818         }
47819         result.result_ = result_;
47820         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
47821           to_bitField0_ |= 0x00000010;
47822         }
47823         if (exceptionBuilder_ == null) {
47824           result.exception_ = exception_;
47825         } else {
47826           result.exception_ = exceptionBuilder_.build();
47827         }
47828         result.bitField0_ = to_bitField0_;
47829         onBuilt();
47830         return result;
47831       }
47832 
mergeFrom(com.google.protobuf.Message other)47833       public Builder mergeFrom(com.google.protobuf.Message other) {
47834         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse) {
47835           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse)other);
47836         } else {
47837           super.mergeFrom(other);
47838           return this;
47839         }
47840       }
47841 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse other)47842       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse other) {
47843         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance()) return this;
47844         if (other.hasState()) {
47845           setState(other.getState());
47846         }
47847         if (other.hasStartTime()) {
47848           setStartTime(other.getStartTime());
47849         }
47850         if (other.hasLastUpdate()) {
47851           setLastUpdate(other.getLastUpdate());
47852         }
47853         if (other.hasResult()) {
47854           setResult(other.getResult());
47855         }
47856         if (other.hasException()) {
47857           mergeException(other.getException());
47858         }
47859         this.mergeUnknownFields(other.getUnknownFields());
47860         return this;
47861       }
47862 
isInitialized()47863       public final boolean isInitialized() {
47864         if (!hasState()) {
47865 
47866           return false;
47867         }
47868         return true;
47869       }
47870 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)47871       public Builder mergeFrom(
47872           com.google.protobuf.CodedInputStream input,
47873           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
47874           throws java.io.IOException {
47875         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse parsedMessage = null;
47876         try {
47877           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
47878         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
47879           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse) e.getUnfinishedMessage();
47880           throw e;
47881         } finally {
47882           if (parsedMessage != null) {
47883             mergeFrom(parsedMessage);
47884           }
47885         }
47886         return this;
47887       }
47888       private int bitField0_;
47889 
47890       // required .GetProcedureResultResponse.State state = 1;
47891       private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State state_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND;
47892       /**
47893        * <code>required .GetProcedureResultResponse.State state = 1;</code>
47894        */
hasState()47895       public boolean hasState() {
47896         return ((bitField0_ & 0x00000001) == 0x00000001);
47897       }
47898       /**
47899        * <code>required .GetProcedureResultResponse.State state = 1;</code>
47900        */
getState()47901       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State getState() {
47902         return state_;
47903       }
47904       /**
47905        * <code>required .GetProcedureResultResponse.State state = 1;</code>
47906        */
setState(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State value)47907       public Builder setState(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State value) {
47908         if (value == null) {
47909           throw new NullPointerException();
47910         }
47911         bitField0_ |= 0x00000001;
47912         state_ = value;
47913         onChanged();
47914         return this;
47915       }
47916       /**
47917        * <code>required .GetProcedureResultResponse.State state = 1;</code>
47918        */
clearState()47919       public Builder clearState() {
47920         bitField0_ = (bitField0_ & ~0x00000001);
47921         state_ = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.State.NOT_FOUND;
47922         onChanged();
47923         return this;
47924       }
47925 
47926       // optional uint64 start_time = 2;
47927       private long startTime_ ;
47928       /**
47929        * <code>optional uint64 start_time = 2;</code>
47930        */
hasStartTime()47931       public boolean hasStartTime() {
47932         return ((bitField0_ & 0x00000002) == 0x00000002);
47933       }
47934       /**
47935        * <code>optional uint64 start_time = 2;</code>
47936        */
getStartTime()47937       public long getStartTime() {
47938         return startTime_;
47939       }
47940       /**
47941        * <code>optional uint64 start_time = 2;</code>
47942        */
setStartTime(long value)47943       public Builder setStartTime(long value) {
47944         bitField0_ |= 0x00000002;
47945         startTime_ = value;
47946         onChanged();
47947         return this;
47948       }
47949       /**
47950        * <code>optional uint64 start_time = 2;</code>
47951        */
clearStartTime()47952       public Builder clearStartTime() {
47953         bitField0_ = (bitField0_ & ~0x00000002);
47954         startTime_ = 0L;
47955         onChanged();
47956         return this;
47957       }
47958 
47959       // optional uint64 last_update = 3;
47960       private long lastUpdate_ ;
47961       /**
47962        * <code>optional uint64 last_update = 3;</code>
47963        */
hasLastUpdate()47964       public boolean hasLastUpdate() {
47965         return ((bitField0_ & 0x00000004) == 0x00000004);
47966       }
47967       /**
47968        * <code>optional uint64 last_update = 3;</code>
47969        */
getLastUpdate()47970       public long getLastUpdate() {
47971         return lastUpdate_;
47972       }
47973       /**
47974        * <code>optional uint64 last_update = 3;</code>
47975        */
setLastUpdate(long value)47976       public Builder setLastUpdate(long value) {
47977         bitField0_ |= 0x00000004;
47978         lastUpdate_ = value;
47979         onChanged();
47980         return this;
47981       }
47982       /**
47983        * <code>optional uint64 last_update = 3;</code>
47984        */
clearLastUpdate()47985       public Builder clearLastUpdate() {
47986         bitField0_ = (bitField0_ & ~0x00000004);
47987         lastUpdate_ = 0L;
47988         onChanged();
47989         return this;
47990       }
47991 
47992       // optional bytes result = 4;
47993       private com.google.protobuf.ByteString result_ = com.google.protobuf.ByteString.EMPTY;
47994       /**
47995        * <code>optional bytes result = 4;</code>
47996        */
hasResult()47997       public boolean hasResult() {
47998         return ((bitField0_ & 0x00000008) == 0x00000008);
47999       }
48000       /**
48001        * <code>optional bytes result = 4;</code>
48002        */
getResult()48003       public com.google.protobuf.ByteString getResult() {
48004         return result_;
48005       }
48006       /**
48007        * <code>optional bytes result = 4;</code>
48008        */
setResult(com.google.protobuf.ByteString value)48009       public Builder setResult(com.google.protobuf.ByteString value) {
48010         if (value == null) {
48011     throw new NullPointerException();
48012   }
48013   bitField0_ |= 0x00000008;
48014         result_ = value;
48015         onChanged();
48016         return this;
48017       }
48018       /**
48019        * <code>optional bytes result = 4;</code>
48020        */
clearResult()48021       public Builder clearResult() {
48022         bitField0_ = (bitField0_ & ~0x00000008);
48023         result_ = getDefaultInstance().getResult();
48024         onChanged();
48025         return this;
48026       }
48027 
48028       // optional .ForeignExceptionMessage exception = 5;
48029       private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance();
48030       private com.google.protobuf.SingleFieldBuilder<
48031           org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder> exceptionBuilder_;
48032       /**
48033        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48034        */
hasException()48035       public boolean hasException() {
48036         return ((bitField0_ & 0x00000010) == 0x00000010);
48037       }
48038       /**
48039        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48040        */
getException()48041       public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getException() {
48042         if (exceptionBuilder_ == null) {
48043           return exception_;
48044         } else {
48045           return exceptionBuilder_.getMessage();
48046         }
48047       }
48048       /**
48049        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48050        */
setException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value)48051       public Builder setException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) {
48052         if (exceptionBuilder_ == null) {
48053           if (value == null) {
48054             throw new NullPointerException();
48055           }
48056           exception_ = value;
48057           onChanged();
48058         } else {
48059           exceptionBuilder_.setMessage(value);
48060         }
48061         bitField0_ |= 0x00000010;
48062         return this;
48063       }
48064       /**
48065        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48066        */
setException( org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder builderForValue)48067       public Builder setException(
48068           org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder builderForValue) {
48069         if (exceptionBuilder_ == null) {
48070           exception_ = builderForValue.build();
48071           onChanged();
48072         } else {
48073           exceptionBuilder_.setMessage(builderForValue.build());
48074         }
48075         bitField0_ |= 0x00000010;
48076         return this;
48077       }
48078       /**
48079        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48080        */
mergeException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value)48081       public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage value) {
48082         if (exceptionBuilder_ == null) {
48083           if (((bitField0_ & 0x00000010) == 0x00000010) &&
48084               exception_ != org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance()) {
48085             exception_ =
48086               org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.newBuilder(exception_).mergeFrom(value).buildPartial();
48087           } else {
48088             exception_ = value;
48089           }
48090           onChanged();
48091         } else {
48092           exceptionBuilder_.mergeFrom(value);
48093         }
48094         bitField0_ |= 0x00000010;
48095         return this;
48096       }
48097       /**
48098        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48099        */
clearException()48100       public Builder clearException() {
48101         if (exceptionBuilder_ == null) {
48102           exception_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.getDefaultInstance();
48103           onChanged();
48104         } else {
48105           exceptionBuilder_.clear();
48106         }
48107         bitField0_ = (bitField0_ & ~0x00000010);
48108         return this;
48109       }
48110       /**
48111        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48112        */
getExceptionBuilder()48113       public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder getExceptionBuilder() {
48114         bitField0_ |= 0x00000010;
48115         onChanged();
48116         return getExceptionFieldBuilder().getBuilder();
48117       }
48118       /**
48119        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48120        */
getExceptionOrBuilder()48121       public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder getExceptionOrBuilder() {
48122         if (exceptionBuilder_ != null) {
48123           return exceptionBuilder_.getMessageOrBuilder();
48124         } else {
48125           return exception_;
48126         }
48127       }
48128       /**
48129        * <code>optional .ForeignExceptionMessage exception = 5;</code>
48130        */
48131       private com.google.protobuf.SingleFieldBuilder<
48132           org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder>
getExceptionFieldBuilder()48133           getExceptionFieldBuilder() {
48134         if (exceptionBuilder_ == null) {
48135           exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
48136               org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder>(
48137                   exception_,
48138                   getParentForChildren(),
48139                   isClean());
48140           exception_ = null;
48141         }
48142         return exceptionBuilder_;
48143       }
48144 
48145       // @@protoc_insertion_point(builder_scope:GetProcedureResultResponse)
48146     }
48147 
    static {
      // Create the singleton default instance via the no-init constructor,
      // then populate its fields with proto defaults.
      defaultInstance = new GetProcedureResultResponse(true);
      defaultInstance.initFields();
    }
48152 
48153     // @@protoc_insertion_point(class_scope:GetProcedureResultResponse)
48154   }
48155 
  /**
   * Read-only accessor interface for {@code AbortProcedureRequest},
   * implemented by both the message and its builder.
   */
  public interface AbortProcedureRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required uint64 proc_id = 1;
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    boolean hasProcId();
    /**
     * <code>required uint64 proc_id = 1;</code>
     */
    long getProcId();

    // optional bool mayInterruptIfRunning = 2 [default = true];
    /**
     * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
     */
    boolean hasMayInterruptIfRunning();
    /**
     * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
     */
    boolean getMayInterruptIfRunning();
  }
48179   /**
48180    * Protobuf type {@code AbortProcedureRequest}
48181    */
48182   public static final class AbortProcedureRequest extends
48183       com.google.protobuf.GeneratedMessage
48184       implements AbortProcedureRequestOrBuilder {
    // Use AbortProcedureRequest.newBuilder() to construct.
    private AbortProcedureRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only to create the singleton default instance.
    private AbortProcedureRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
48191 
    // Shared immutable default instance; initialized in the class's static block.
    private static final AbortProcedureRequest defaultInstance;
    public static AbortProcedureRequest getDefaultInstance() {
      return defaultInstance;
    }

    public AbortProcedureRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
48200 
    // Fields seen on the wire that this schema version does not know about;
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tag/value pairs from
     * {@code input} until EOF (tag 0), setting presence bits as fields are
     * seen.  Tag 8 = proc_id (varint), tag 16 = mayInterruptIfRunning (bool);
     * anything else is routed into the unknown-field set.
     */
    private AbortProcedureRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();  // start from proto defaults before applying parsed values
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the default branch precedes the field cases; Java switch
          // semantics make case ordering irrelevant.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              procId_ = input.readUInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              mayInterruptIfRunning_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always capture unknown fields and freeze extensions, even when
        // parsing fails partway through.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
48252     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()48253         getDescriptor() {
48254       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureRequest_descriptor;
48255     }
48256 
48257     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()48258         internalGetFieldAccessorTable() {
48259       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureRequest_fieldAccessorTable
48260           .ensureFieldAccessorsInitialized(
48261               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.Builder.class);
48262     }
48263 
48264     public static com.google.protobuf.Parser<AbortProcedureRequest> PARSER =
48265         new com.google.protobuf.AbstractParser<AbortProcedureRequest>() {
48266       public AbortProcedureRequest parsePartialFrom(
48267           com.google.protobuf.CodedInputStream input,
48268           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48269           throws com.google.protobuf.InvalidProtocolBufferException {
48270         return new AbortProcedureRequest(input, extensionRegistry);
48271       }
48272     };
48273 
48274     @java.lang.Override
getParserForType()48275     public com.google.protobuf.Parser<AbortProcedureRequest> getParserForType() {
48276       return PARSER;
48277     }
48278 
48279     private int bitField0_;
48280     // required uint64 proc_id = 1;
48281     public static final int PROC_ID_FIELD_NUMBER = 1;
48282     private long procId_;
48283     /**
48284      * <code>required uint64 proc_id = 1;</code>
48285      */
hasProcId()48286     public boolean hasProcId() {
48287       return ((bitField0_ & 0x00000001) == 0x00000001);
48288     }
48289     /**
48290      * <code>required uint64 proc_id = 1;</code>
48291      */
getProcId()48292     public long getProcId() {
48293       return procId_;
48294     }
48295 
48296     // optional bool mayInterruptIfRunning = 2 [default = true];
48297     public static final int MAYINTERRUPTIFRUNNING_FIELD_NUMBER = 2;
48298     private boolean mayInterruptIfRunning_;
48299     /**
48300      * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
48301      */
hasMayInterruptIfRunning()48302     public boolean hasMayInterruptIfRunning() {
48303       return ((bitField0_ & 0x00000002) == 0x00000002);
48304     }
48305     /**
48306      * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
48307      */
getMayInterruptIfRunning()48308     public boolean getMayInterruptIfRunning() {
48309       return mayInterruptIfRunning_;
48310     }
48311 
initFields()48312     private void initFields() {
48313       procId_ = 0L;
48314       mayInterruptIfRunning_ = true;
48315     }
48316     private byte memoizedIsInitialized = -1;
isInitialized()48317     public final boolean isInitialized() {
48318       byte isInitialized = memoizedIsInitialized;
48319       if (isInitialized != -1) return isInitialized == 1;
48320 
48321       if (!hasProcId()) {
48322         memoizedIsInitialized = 0;
48323         return false;
48324       }
48325       memoizedIsInitialized = 1;
48326       return true;
48327     }
48328 
writeTo(com.google.protobuf.CodedOutputStream output)48329     public void writeTo(com.google.protobuf.CodedOutputStream output)
48330                         throws java.io.IOException {
48331       getSerializedSize();
48332       if (((bitField0_ & 0x00000001) == 0x00000001)) {
48333         output.writeUInt64(1, procId_);
48334       }
48335       if (((bitField0_ & 0x00000002) == 0x00000002)) {
48336         output.writeBool(2, mayInterruptIfRunning_);
48337       }
48338       getUnknownFields().writeTo(output);
48339     }
48340 
48341     private int memoizedSerializedSize = -1;
getSerializedSize()48342     public int getSerializedSize() {
48343       int size = memoizedSerializedSize;
48344       if (size != -1) return size;
48345 
48346       size = 0;
48347       if (((bitField0_ & 0x00000001) == 0x00000001)) {
48348         size += com.google.protobuf.CodedOutputStream
48349           .computeUInt64Size(1, procId_);
48350       }
48351       if (((bitField0_ & 0x00000002) == 0x00000002)) {
48352         size += com.google.protobuf.CodedOutputStream
48353           .computeBoolSize(2, mayInterruptIfRunning_);
48354       }
48355       size += getUnknownFields().getSerializedSize();
48356       memoizedSerializedSize = size;
48357       return size;
48358     }
48359 
48360     private static final long serialVersionUID = 0L;
48361     @java.lang.Override
writeReplace()48362     protected java.lang.Object writeReplace()
48363         throws java.io.ObjectStreamException {
48364       return super.writeReplace();
48365     }
48366 
48367     @java.lang.Override
equals(final java.lang.Object obj)48368     public boolean equals(final java.lang.Object obj) {
48369       if (obj == this) {
48370        return true;
48371       }
48372       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)) {
48373         return super.equals(obj);
48374       }
48375       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest) obj;
48376 
48377       boolean result = true;
48378       result = result && (hasProcId() == other.hasProcId());
48379       if (hasProcId()) {
48380         result = result && (getProcId()
48381             == other.getProcId());
48382       }
48383       result = result && (hasMayInterruptIfRunning() == other.hasMayInterruptIfRunning());
48384       if (hasMayInterruptIfRunning()) {
48385         result = result && (getMayInterruptIfRunning()
48386             == other.getMayInterruptIfRunning());
48387       }
48388       result = result &&
48389           getUnknownFields().equals(other.getUnknownFields());
48390       return result;
48391     }
48392 
48393     private int memoizedHashCode = 0;
48394     @java.lang.Override
hashCode()48395     public int hashCode() {
48396       if (memoizedHashCode != 0) {
48397         return memoizedHashCode;
48398       }
48399       int hash = 41;
48400       hash = (19 * hash) + getDescriptorForType().hashCode();
48401       if (hasProcId()) {
48402         hash = (37 * hash) + PROC_ID_FIELD_NUMBER;
48403         hash = (53 * hash) + hashLong(getProcId());
48404       }
48405       if (hasMayInterruptIfRunning()) {
48406         hash = (37 * hash) + MAYINTERRUPTIFRUNNING_FIELD_NUMBER;
48407         hash = (53 * hash) + hashBoolean(getMayInterruptIfRunning());
48408       }
48409       hash = (29 * hash) + getUnknownFields().hashCode();
48410       memoizedHashCode = hash;
48411       return hash;
48412     }
48413 
parseFrom( com.google.protobuf.ByteString data)48414     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(
48415         com.google.protobuf.ByteString data)
48416         throws com.google.protobuf.InvalidProtocolBufferException {
48417       return PARSER.parseFrom(data);
48418     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48419     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(
48420         com.google.protobuf.ByteString data,
48421         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48422         throws com.google.protobuf.InvalidProtocolBufferException {
48423       return PARSER.parseFrom(data, extensionRegistry);
48424     }
parseFrom(byte[] data)48425     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(byte[] data)
48426         throws com.google.protobuf.InvalidProtocolBufferException {
48427       return PARSER.parseFrom(data);
48428     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48429     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(
48430         byte[] data,
48431         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48432         throws com.google.protobuf.InvalidProtocolBufferException {
48433       return PARSER.parseFrom(data, extensionRegistry);
48434     }
parseFrom(java.io.InputStream input)48435     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(java.io.InputStream input)
48436         throws java.io.IOException {
48437       return PARSER.parseFrom(input);
48438     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48439     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(
48440         java.io.InputStream input,
48441         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48442         throws java.io.IOException {
48443       return PARSER.parseFrom(input, extensionRegistry);
48444     }
parseDelimitedFrom(java.io.InputStream input)48445     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseDelimitedFrom(java.io.InputStream input)
48446         throws java.io.IOException {
48447       return PARSER.parseDelimitedFrom(input);
48448     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48449     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseDelimitedFrom(
48450         java.io.InputStream input,
48451         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48452         throws java.io.IOException {
48453       return PARSER.parseDelimitedFrom(input, extensionRegistry);
48454     }
parseFrom( com.google.protobuf.CodedInputStream input)48455     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(
48456         com.google.protobuf.CodedInputStream input)
48457         throws java.io.IOException {
48458       return PARSER.parseFrom(input);
48459     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48460     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parseFrom(
48461         com.google.protobuf.CodedInputStream input,
48462         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48463         throws java.io.IOException {
48464       return PARSER.parseFrom(input, extensionRegistry);
48465     }
48466 
newBuilder()48467     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()48468     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest prototype)48469     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest prototype) {
48470       return newBuilder().mergeFrom(prototype);
48471     }
toBuilder()48472     public Builder toBuilder() { return newBuilder(this); }
48473 
48474     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)48475     protected Builder newBuilderForType(
48476         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
48477       Builder builder = new Builder(parent);
48478       return builder;
48479     }
48480     /**
48481      * Protobuf type {@code AbortProcedureRequest}
48482      */
48483     public static final class Builder extends
48484         com.google.protobuf.GeneratedMessage.Builder<Builder>
48485        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequestOrBuilder {
48486       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()48487           getDescriptor() {
48488         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureRequest_descriptor;
48489       }
48490 
48491       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()48492           internalGetFieldAccessorTable() {
48493         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureRequest_fieldAccessorTable
48494             .ensureFieldAccessorsInitialized(
48495                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.Builder.class);
48496       }
48497 
48498       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.newBuilder()
Builder()48499       private Builder() {
48500         maybeForceBuilderInitialization();
48501       }
48502 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)48503       private Builder(
48504           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
48505         super(parent);
48506         maybeForceBuilderInitialization();
48507       }
maybeForceBuilderInitialization()48508       private void maybeForceBuilderInitialization() {
48509         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
48510         }
48511       }
create()48512       private static Builder create() {
48513         return new Builder();
48514       }
48515 
clear()48516       public Builder clear() {
48517         super.clear();
48518         procId_ = 0L;
48519         bitField0_ = (bitField0_ & ~0x00000001);
48520         mayInterruptIfRunning_ = true;
48521         bitField0_ = (bitField0_ & ~0x00000002);
48522         return this;
48523       }
48524 
clone()48525       public Builder clone() {
48526         return create().mergeFrom(buildPartial());
48527       }
48528 
48529       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()48530           getDescriptorForType() {
48531         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureRequest_descriptor;
48532       }
48533 
getDefaultInstanceForType()48534       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest getDefaultInstanceForType() {
48535         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
48536       }
48537 
build()48538       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest build() {
48539         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest result = buildPartial();
48540         if (!result.isInitialized()) {
48541           throw newUninitializedMessageException(result);
48542         }
48543         return result;
48544       }
48545 
buildPartial()48546       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest buildPartial() {
48547         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest(this);
48548         int from_bitField0_ = bitField0_;
48549         int to_bitField0_ = 0;
48550         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
48551           to_bitField0_ |= 0x00000001;
48552         }
48553         result.procId_ = procId_;
48554         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
48555           to_bitField0_ |= 0x00000002;
48556         }
48557         result.mayInterruptIfRunning_ = mayInterruptIfRunning_;
48558         result.bitField0_ = to_bitField0_;
48559         onBuilt();
48560         return result;
48561       }
48562 
mergeFrom(com.google.protobuf.Message other)48563       public Builder mergeFrom(com.google.protobuf.Message other) {
48564         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest) {
48565           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)other);
48566         } else {
48567           super.mergeFrom(other);
48568           return this;
48569         }
48570       }
48571 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest other)48572       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest other) {
48573         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance()) return this;
48574         if (other.hasProcId()) {
48575           setProcId(other.getProcId());
48576         }
48577         if (other.hasMayInterruptIfRunning()) {
48578           setMayInterruptIfRunning(other.getMayInterruptIfRunning());
48579         }
48580         this.mergeUnknownFields(other.getUnknownFields());
48581         return this;
48582       }
48583 
isInitialized()48584       public final boolean isInitialized() {
48585         if (!hasProcId()) {
48586 
48587           return false;
48588         }
48589         return true;
48590       }
48591 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48592       public Builder mergeFrom(
48593           com.google.protobuf.CodedInputStream input,
48594           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48595           throws java.io.IOException {
48596         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest parsedMessage = null;
48597         try {
48598           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
48599         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
48600           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest) e.getUnfinishedMessage();
48601           throw e;
48602         } finally {
48603           if (parsedMessage != null) {
48604             mergeFrom(parsedMessage);
48605           }
48606         }
48607         return this;
48608       }
48609       private int bitField0_;
48610 
48611       // required uint64 proc_id = 1;
48612       private long procId_ ;
48613       /**
48614        * <code>required uint64 proc_id = 1;</code>
48615        */
hasProcId()48616       public boolean hasProcId() {
48617         return ((bitField0_ & 0x00000001) == 0x00000001);
48618       }
48619       /**
48620        * <code>required uint64 proc_id = 1;</code>
48621        */
getProcId()48622       public long getProcId() {
48623         return procId_;
48624       }
48625       /**
48626        * <code>required uint64 proc_id = 1;</code>
48627        */
setProcId(long value)48628       public Builder setProcId(long value) {
48629         bitField0_ |= 0x00000001;
48630         procId_ = value;
48631         onChanged();
48632         return this;
48633       }
48634       /**
48635        * <code>required uint64 proc_id = 1;</code>
48636        */
clearProcId()48637       public Builder clearProcId() {
48638         bitField0_ = (bitField0_ & ~0x00000001);
48639         procId_ = 0L;
48640         onChanged();
48641         return this;
48642       }
48643 
48644       // optional bool mayInterruptIfRunning = 2 [default = true];
48645       private boolean mayInterruptIfRunning_ = true;
48646       /**
48647        * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
48648        */
hasMayInterruptIfRunning()48649       public boolean hasMayInterruptIfRunning() {
48650         return ((bitField0_ & 0x00000002) == 0x00000002);
48651       }
48652       /**
48653        * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
48654        */
getMayInterruptIfRunning()48655       public boolean getMayInterruptIfRunning() {
48656         return mayInterruptIfRunning_;
48657       }
48658       /**
48659        * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
48660        */
setMayInterruptIfRunning(boolean value)48661       public Builder setMayInterruptIfRunning(boolean value) {
48662         bitField0_ |= 0x00000002;
48663         mayInterruptIfRunning_ = value;
48664         onChanged();
48665         return this;
48666       }
48667       /**
48668        * <code>optional bool mayInterruptIfRunning = 2 [default = true];</code>
48669        */
clearMayInterruptIfRunning()48670       public Builder clearMayInterruptIfRunning() {
48671         bitField0_ = (bitField0_ & ~0x00000002);
48672         mayInterruptIfRunning_ = true;
48673         onChanged();
48674         return this;
48675       }
48676 
48677       // @@protoc_insertion_point(builder_scope:AbortProcedureRequest)
48678     }
48679 
48680     static {
48681       defaultInstance = new AbortProcedureRequest(true);
defaultInstance.initFields()48682       defaultInstance.initFields();
48683     }
48684 
48685     // @@protoc_insertion_point(class_scope:AbortProcedureRequest)
48686   }
48687 
48688   public interface AbortProcedureResponseOrBuilder
48689       extends com.google.protobuf.MessageOrBuilder {
48690 
48691     // required bool is_procedure_aborted = 1;
48692     /**
48693      * <code>required bool is_procedure_aborted = 1;</code>
48694      */
hasIsProcedureAborted()48695     boolean hasIsProcedureAborted();
48696     /**
48697      * <code>required bool is_procedure_aborted = 1;</code>
48698      */
getIsProcedureAborted()48699     boolean getIsProcedureAborted();
48700   }
48701   /**
48702    * Protobuf type {@code AbortProcedureResponse}
48703    */
48704   public static final class AbortProcedureResponse extends
48705       com.google.protobuf.GeneratedMessage
48706       implements AbortProcedureResponseOrBuilder {
48707     // Use AbortProcedureResponse.newBuilder() to construct.
AbortProcedureResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)48708     private AbortProcedureResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
48709       super(builder);
48710       this.unknownFields = builder.getUnknownFields();
48711     }
AbortProcedureResponse(boolean noInit)48712     private AbortProcedureResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
48713 
48714     private static final AbortProcedureResponse defaultInstance;
getDefaultInstance()48715     public static AbortProcedureResponse getDefaultInstance() {
48716       return defaultInstance;
48717     }
48718 
getDefaultInstanceForType()48719     public AbortProcedureResponse getDefaultInstanceForType() {
48720       return defaultInstance;
48721     }
48722 
48723     private final com.google.protobuf.UnknownFieldSet unknownFields;
48724     @java.lang.Override
48725     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()48726         getUnknownFields() {
48727       return this.unknownFields;
48728     }
AbortProcedureResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48729     private AbortProcedureResponse(
48730         com.google.protobuf.CodedInputStream input,
48731         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48732         throws com.google.protobuf.InvalidProtocolBufferException {
48733       initFields();
48734       int mutable_bitField0_ = 0;
48735       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
48736           com.google.protobuf.UnknownFieldSet.newBuilder();
48737       try {
48738         boolean done = false;
48739         while (!done) {
48740           int tag = input.readTag();
48741           switch (tag) {
48742             case 0:
48743               done = true;
48744               break;
48745             default: {
48746               if (!parseUnknownField(input, unknownFields,
48747                                      extensionRegistry, tag)) {
48748                 done = true;
48749               }
48750               break;
48751             }
48752             case 8: {
48753               bitField0_ |= 0x00000001;
48754               isProcedureAborted_ = input.readBool();
48755               break;
48756             }
48757           }
48758         }
48759       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
48760         throw e.setUnfinishedMessage(this);
48761       } catch (java.io.IOException e) {
48762         throw new com.google.protobuf.InvalidProtocolBufferException(
48763             e.getMessage()).setUnfinishedMessage(this);
48764       } finally {
48765         this.unknownFields = unknownFields.build();
48766         makeExtensionsImmutable();
48767       }
48768     }
48769     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()48770         getDescriptor() {
48771       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureResponse_descriptor;
48772     }
48773 
48774     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()48775         internalGetFieldAccessorTable() {
48776       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureResponse_fieldAccessorTable
48777           .ensureFieldAccessorsInitialized(
48778               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.Builder.class);
48779     }
48780 
48781     public static com.google.protobuf.Parser<AbortProcedureResponse> PARSER =
48782         new com.google.protobuf.AbstractParser<AbortProcedureResponse>() {
48783       public AbortProcedureResponse parsePartialFrom(
48784           com.google.protobuf.CodedInputStream input,
48785           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48786           throws com.google.protobuf.InvalidProtocolBufferException {
48787         return new AbortProcedureResponse(input, extensionRegistry);
48788       }
48789     };
48790 
48791     @java.lang.Override
getParserForType()48792     public com.google.protobuf.Parser<AbortProcedureResponse> getParserForType() {
48793       return PARSER;
48794     }
48795 
48796     private int bitField0_;
48797     // required bool is_procedure_aborted = 1;
48798     public static final int IS_PROCEDURE_ABORTED_FIELD_NUMBER = 1;
48799     private boolean isProcedureAborted_;
48800     /**
48801      * <code>required bool is_procedure_aborted = 1;</code>
48802      */
hasIsProcedureAborted()48803     public boolean hasIsProcedureAborted() {
48804       return ((bitField0_ & 0x00000001) == 0x00000001);
48805     }
48806     /**
48807      * <code>required bool is_procedure_aborted = 1;</code>
48808      */
getIsProcedureAborted()48809     public boolean getIsProcedureAborted() {
48810       return isProcedureAborted_;
48811     }
48812 
initFields()48813     private void initFields() {
48814       isProcedureAborted_ = false;
48815     }
48816     private byte memoizedIsInitialized = -1;
isInitialized()48817     public final boolean isInitialized() {
48818       byte isInitialized = memoizedIsInitialized;
48819       if (isInitialized != -1) return isInitialized == 1;
48820 
48821       if (!hasIsProcedureAborted()) {
48822         memoizedIsInitialized = 0;
48823         return false;
48824       }
48825       memoizedIsInitialized = 1;
48826       return true;
48827     }
48828 
writeTo(com.google.protobuf.CodedOutputStream output)48829     public void writeTo(com.google.protobuf.CodedOutputStream output)
48830                         throws java.io.IOException {
48831       getSerializedSize();
48832       if (((bitField0_ & 0x00000001) == 0x00000001)) {
48833         output.writeBool(1, isProcedureAborted_);
48834       }
48835       getUnknownFields().writeTo(output);
48836     }
48837 
48838     private int memoizedSerializedSize = -1;
getSerializedSize()48839     public int getSerializedSize() {
48840       int size = memoizedSerializedSize;
48841       if (size != -1) return size;
48842 
48843       size = 0;
48844       if (((bitField0_ & 0x00000001) == 0x00000001)) {
48845         size += com.google.protobuf.CodedOutputStream
48846           .computeBoolSize(1, isProcedureAborted_);
48847       }
48848       size += getUnknownFields().getSerializedSize();
48849       memoizedSerializedSize = size;
48850       return size;
48851     }
48852 
48853     private static final long serialVersionUID = 0L;
48854     @java.lang.Override
writeReplace()48855     protected java.lang.Object writeReplace()
48856         throws java.io.ObjectStreamException {
48857       return super.writeReplace();
48858     }
48859 
48860     @java.lang.Override
equals(final java.lang.Object obj)48861     public boolean equals(final java.lang.Object obj) {
48862       if (obj == this) {
48863        return true;
48864       }
48865       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse)) {
48866         return super.equals(obj);
48867       }
48868       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse) obj;
48869 
48870       boolean result = true;
48871       result = result && (hasIsProcedureAborted() == other.hasIsProcedureAborted());
48872       if (hasIsProcedureAborted()) {
48873         result = result && (getIsProcedureAborted()
48874             == other.getIsProcedureAborted());
48875       }
48876       result = result &&
48877           getUnknownFields().equals(other.getUnknownFields());
48878       return result;
48879     }
48880 
    // Cached hash; 0 means "not yet computed" (recomputed if the real hash happens to be 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mixes the descriptor, each present field (keyed by its field number), and unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasIsProcedureAborted()) {
        hash = (37 * hash) + IS_PROCEDURE_ABORTED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsProcedureAborted());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
48897 
parseFrom( com.google.protobuf.ByteString data)48898     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(
48899         com.google.protobuf.ByteString data)
48900         throws com.google.protobuf.InvalidProtocolBufferException {
48901       return PARSER.parseFrom(data);
48902     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48903     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(
48904         com.google.protobuf.ByteString data,
48905         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48906         throws com.google.protobuf.InvalidProtocolBufferException {
48907       return PARSER.parseFrom(data, extensionRegistry);
48908     }
parseFrom(byte[] data)48909     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(byte[] data)
48910         throws com.google.protobuf.InvalidProtocolBufferException {
48911       return PARSER.parseFrom(data);
48912     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48913     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(
48914         byte[] data,
48915         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48916         throws com.google.protobuf.InvalidProtocolBufferException {
48917       return PARSER.parseFrom(data, extensionRegistry);
48918     }
parseFrom(java.io.InputStream input)48919     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(java.io.InputStream input)
48920         throws java.io.IOException {
48921       return PARSER.parseFrom(input);
48922     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48923     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(
48924         java.io.InputStream input,
48925         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48926         throws java.io.IOException {
48927       return PARSER.parseFrom(input, extensionRegistry);
48928     }
parseDelimitedFrom(java.io.InputStream input)48929     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseDelimitedFrom(java.io.InputStream input)
48930         throws java.io.IOException {
48931       return PARSER.parseDelimitedFrom(input);
48932     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48933     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseDelimitedFrom(
48934         java.io.InputStream input,
48935         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48936         throws java.io.IOException {
48937       return PARSER.parseDelimitedFrom(input, extensionRegistry);
48938     }
parseFrom( com.google.protobuf.CodedInputStream input)48939     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(
48940         com.google.protobuf.CodedInputStream input)
48941         throws java.io.IOException {
48942       return PARSER.parseFrom(input);
48943     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)48944     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parseFrom(
48945         com.google.protobuf.CodedInputStream input,
48946         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
48947         throws java.io.IOException {
48948       return PARSER.parseFrom(input, extensionRegistry);
48949     }
48950 
newBuilder()48951     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()48952     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse prototype)48953     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse prototype) {
48954       return newBuilder().mergeFrom(prototype);
48955     }
toBuilder()48956     public Builder toBuilder() { return newBuilder(this); }
48957 
48958     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)48959     protected Builder newBuilderForType(
48960         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
48961       Builder builder = new Builder(parent);
48962       return builder;
48963     }
48964     /**
48965      * Protobuf type {@code AbortProcedureResponse}
48966      */
48967     public static final class Builder extends
48968         com.google.protobuf.GeneratedMessage.Builder<Builder>
48969        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponseOrBuilder {
48970       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()48971           getDescriptor() {
48972         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureResponse_descriptor;
48973       }
48974 
48975       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()48976           internalGetFieldAccessorTable() {
48977         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureResponse_fieldAccessorTable
48978             .ensureFieldAccessorsInitialized(
48979                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.Builder.class);
48980       }
48981 
48982       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.newBuilder()
Builder()48983       private Builder() {
48984         maybeForceBuilderInitialization();
48985       }
48986 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)48987       private Builder(
48988           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
48989         super(parent);
48990         maybeForceBuilderInitialization();
48991       }
maybeForceBuilderInitialization()48992       private void maybeForceBuilderInitialization() {
48993         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
48994         }
48995       }
create()48996       private static Builder create() {
48997         return new Builder();
48998       }
48999 
clear()49000       public Builder clear() {
49001         super.clear();
49002         isProcedureAborted_ = false;
49003         bitField0_ = (bitField0_ & ~0x00000001);
49004         return this;
49005       }
49006 
clone()49007       public Builder clone() {
49008         return create().mergeFrom(buildPartial());
49009       }
49010 
49011       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()49012           getDescriptorForType() {
49013         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_AbortProcedureResponse_descriptor;
49014       }
49015 
getDefaultInstanceForType()49016       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse getDefaultInstanceForType() {
49017         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
49018       }
49019 
build()49020       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse build() {
49021         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse result = buildPartial();
49022         if (!result.isInitialized()) {
49023           throw newUninitializedMessageException(result);
49024         }
49025         return result;
49026       }
49027 
buildPartial()49028       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse buildPartial() {
49029         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse(this);
49030         int from_bitField0_ = bitField0_;
49031         int to_bitField0_ = 0;
49032         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
49033           to_bitField0_ |= 0x00000001;
49034         }
49035         result.isProcedureAborted_ = isProcedureAborted_;
49036         result.bitField0_ = to_bitField0_;
49037         onBuilt();
49038         return result;
49039       }
49040 
mergeFrom(com.google.protobuf.Message other)49041       public Builder mergeFrom(com.google.protobuf.Message other) {
49042         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse) {
49043           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse)other);
49044         } else {
49045           super.mergeFrom(other);
49046           return this;
49047         }
49048       }
49049 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse other)49050       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse other) {
49051         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance()) return this;
49052         if (other.hasIsProcedureAborted()) {
49053           setIsProcedureAborted(other.getIsProcedureAborted());
49054         }
49055         this.mergeUnknownFields(other.getUnknownFields());
49056         return this;
49057       }
49058 
isInitialized()49059       public final boolean isInitialized() {
49060         if (!hasIsProcedureAborted()) {
49061 
49062           return false;
49063         }
49064         return true;
49065       }
49066 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49067       public Builder mergeFrom(
49068           com.google.protobuf.CodedInputStream input,
49069           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49070           throws java.io.IOException {
49071         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse parsedMessage = null;
49072         try {
49073           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
49074         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
49075           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse) e.getUnfinishedMessage();
49076           throw e;
49077         } finally {
49078           if (parsedMessage != null) {
49079             mergeFrom(parsedMessage);
49080           }
49081         }
49082         return this;
49083       }
49084       private int bitField0_;
49085 
49086       // required bool is_procedure_aborted = 1;
49087       private boolean isProcedureAborted_ ;
49088       /**
49089        * <code>required bool is_procedure_aborted = 1;</code>
49090        */
hasIsProcedureAborted()49091       public boolean hasIsProcedureAborted() {
49092         return ((bitField0_ & 0x00000001) == 0x00000001);
49093       }
49094       /**
49095        * <code>required bool is_procedure_aborted = 1;</code>
49096        */
getIsProcedureAborted()49097       public boolean getIsProcedureAborted() {
49098         return isProcedureAborted_;
49099       }
49100       /**
49101        * <code>required bool is_procedure_aborted = 1;</code>
49102        */
setIsProcedureAborted(boolean value)49103       public Builder setIsProcedureAborted(boolean value) {
49104         bitField0_ |= 0x00000001;
49105         isProcedureAborted_ = value;
49106         onChanged();
49107         return this;
49108       }
49109       /**
49110        * <code>required bool is_procedure_aborted = 1;</code>
49111        */
clearIsProcedureAborted()49112       public Builder clearIsProcedureAborted() {
49113         bitField0_ = (bitField0_ & ~0x00000001);
49114         isProcedureAborted_ = false;
49115         onChanged();
49116         return this;
49117       }
49118 
49119       // @@protoc_insertion_point(builder_scope:AbortProcedureResponse)
49120     }
49121 
    // Eagerly creates the shared default instance (the `true` flag selects the
    // no-parse constructor) and initializes its fields to defaults.
    static {
      defaultInstance = new AbortProcedureResponse(true);
      defaultInstance.initFields();
    }
49126 
49127     // @@protoc_insertion_point(class_scope:AbortProcedureResponse)
49128   }
49129 
  // Read-only accessor interface for ListProceduresRequest. The message declares no
  // fields, so only the base MessageOrBuilder contract applies.
  public interface ListProceduresRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
49133   /**
49134    * Protobuf type {@code ListProceduresRequest}
49135    */
49136   public static final class ListProceduresRequest extends
49137       com.google.protobuf.GeneratedMessage
49138       implements ListProceduresRequestOrBuilder {
49139     // Use ListProceduresRequest.newBuilder() to construct.
ListProceduresRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)49140     private ListProceduresRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
49141       super(builder);
49142       this.unknownFields = builder.getUnknownFields();
49143     }
ListProceduresRequest(boolean noInit)49144     private ListProceduresRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
49145 
49146     private static final ListProceduresRequest defaultInstance;
getDefaultInstance()49147     public static ListProceduresRequest getDefaultInstance() {
49148       return defaultInstance;
49149     }
49150 
getDefaultInstanceForType()49151     public ListProceduresRequest getDefaultInstanceForType() {
49152       return defaultInstance;
49153     }
49154 
49155     private final com.google.protobuf.UnknownFieldSet unknownFields;
49156     @java.lang.Override
49157     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()49158         getUnknownFields() {
49159       return this.unknownFields;
49160     }
ListProceduresRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49161     private ListProceduresRequest(
49162         com.google.protobuf.CodedInputStream input,
49163         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49164         throws com.google.protobuf.InvalidProtocolBufferException {
49165       initFields();
49166       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
49167           com.google.protobuf.UnknownFieldSet.newBuilder();
49168       try {
49169         boolean done = false;
49170         while (!done) {
49171           int tag = input.readTag();
49172           switch (tag) {
49173             case 0:
49174               done = true;
49175               break;
49176             default: {
49177               if (!parseUnknownField(input, unknownFields,
49178                                      extensionRegistry, tag)) {
49179                 done = true;
49180               }
49181               break;
49182             }
49183           }
49184         }
49185       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
49186         throw e.setUnfinishedMessage(this);
49187       } catch (java.io.IOException e) {
49188         throw new com.google.protobuf.InvalidProtocolBufferException(
49189             e.getMessage()).setUnfinishedMessage(this);
49190       } finally {
49191         this.unknownFields = unknownFields.build();
49192         makeExtensionsImmutable();
49193       }
49194     }
49195     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()49196         getDescriptor() {
49197       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_descriptor;
49198     }
49199 
49200     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()49201         internalGetFieldAccessorTable() {
49202       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_fieldAccessorTable
49203           .ensureFieldAccessorsInitialized(
49204               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class);
49205     }
49206 
49207     public static com.google.protobuf.Parser<ListProceduresRequest> PARSER =
49208         new com.google.protobuf.AbstractParser<ListProceduresRequest>() {
49209       public ListProceduresRequest parsePartialFrom(
49210           com.google.protobuf.CodedInputStream input,
49211           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49212           throws com.google.protobuf.InvalidProtocolBufferException {
49213         return new ListProceduresRequest(input, extensionRegistry);
49214       }
49215     };
49216 
49217     @java.lang.Override
getParserForType()49218     public com.google.protobuf.Parser<ListProceduresRequest> getParserForType() {
49219       return PARSER;
49220     }
49221 
initFields()49222     private void initFields() {
49223     }
49224     private byte memoizedIsInitialized = -1;
isInitialized()49225     public final boolean isInitialized() {
49226       byte isInitialized = memoizedIsInitialized;
49227       if (isInitialized != -1) return isInitialized == 1;
49228 
49229       memoizedIsInitialized = 1;
49230       return true;
49231     }
49232 
writeTo(com.google.protobuf.CodedOutputStream output)49233     public void writeTo(com.google.protobuf.CodedOutputStream output)
49234                         throws java.io.IOException {
49235       getSerializedSize();
49236       getUnknownFields().writeTo(output);
49237     }
49238 
49239     private int memoizedSerializedSize = -1;
getSerializedSize()49240     public int getSerializedSize() {
49241       int size = memoizedSerializedSize;
49242       if (size != -1) return size;
49243 
49244       size = 0;
49245       size += getUnknownFields().getSerializedSize();
49246       memoizedSerializedSize = size;
49247       return size;
49248     }
49249 
49250     private static final long serialVersionUID = 0L;
49251     @java.lang.Override
writeReplace()49252     protected java.lang.Object writeReplace()
49253         throws java.io.ObjectStreamException {
49254       return super.writeReplace();
49255     }
49256 
49257     @java.lang.Override
equals(final java.lang.Object obj)49258     public boolean equals(final java.lang.Object obj) {
49259       if (obj == this) {
49260        return true;
49261       }
49262       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)) {
49263         return super.equals(obj);
49264       }
49265       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) obj;
49266 
49267       boolean result = true;
49268       result = result &&
49269           getUnknownFields().equals(other.getUnknownFields());
49270       return result;
49271     }
49272 
49273     private int memoizedHashCode = 0;
49274     @java.lang.Override
hashCode()49275     public int hashCode() {
49276       if (memoizedHashCode != 0) {
49277         return memoizedHashCode;
49278       }
49279       int hash = 41;
49280       hash = (19 * hash) + getDescriptorForType().hashCode();
49281       hash = (29 * hash) + getUnknownFields().hashCode();
49282       memoizedHashCode = hash;
49283       return hash;
49284     }
49285 
parseFrom( com.google.protobuf.ByteString data)49286     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
49287         com.google.protobuf.ByteString data)
49288         throws com.google.protobuf.InvalidProtocolBufferException {
49289       return PARSER.parseFrom(data);
49290     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49291     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
49292         com.google.protobuf.ByteString data,
49293         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49294         throws com.google.protobuf.InvalidProtocolBufferException {
49295       return PARSER.parseFrom(data, extensionRegistry);
49296     }
parseFrom(byte[] data)49297     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(byte[] data)
49298         throws com.google.protobuf.InvalidProtocolBufferException {
49299       return PARSER.parseFrom(data);
49300     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49301     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
49302         byte[] data,
49303         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49304         throws com.google.protobuf.InvalidProtocolBufferException {
49305       return PARSER.parseFrom(data, extensionRegistry);
49306     }
parseFrom(java.io.InputStream input)49307     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(java.io.InputStream input)
49308         throws java.io.IOException {
49309       return PARSER.parseFrom(input);
49310     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49311     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
49312         java.io.InputStream input,
49313         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49314         throws java.io.IOException {
49315       return PARSER.parseFrom(input, extensionRegistry);
49316     }
parseDelimitedFrom(java.io.InputStream input)49317     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(java.io.InputStream input)
49318         throws java.io.IOException {
49319       return PARSER.parseDelimitedFrom(input);
49320     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49321     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseDelimitedFrom(
49322         java.io.InputStream input,
49323         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49324         throws java.io.IOException {
49325       return PARSER.parseDelimitedFrom(input, extensionRegistry);
49326     }
parseFrom( com.google.protobuf.CodedInputStream input)49327     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
49328         com.google.protobuf.CodedInputStream input)
49329         throws java.io.IOException {
49330       return PARSER.parseFrom(input);
49331     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49332     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parseFrom(
49333         com.google.protobuf.CodedInputStream input,
49334         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49335         throws java.io.IOException {
49336       return PARSER.parseFrom(input, extensionRegistry);
49337     }
49338 
newBuilder()49339     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()49340     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest prototype)49341     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest prototype) {
49342       return newBuilder().mergeFrom(prototype);
49343     }
toBuilder()49344     public Builder toBuilder() { return newBuilder(this); }
49345 
49346     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)49347     protected Builder newBuilderForType(
49348         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
49349       Builder builder = new Builder(parent);
49350       return builder;
49351     }
49352     /**
49353      * Protobuf type {@code ListProceduresRequest}
49354      */
49355     public static final class Builder extends
49356         com.google.protobuf.GeneratedMessage.Builder<Builder>
49357        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequestOrBuilder {
49358       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()49359           getDescriptor() {
49360         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_descriptor;
49361       }
49362 
49363       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()49364           internalGetFieldAccessorTable() {
49365         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_fieldAccessorTable
49366             .ensureFieldAccessorsInitialized(
49367                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.Builder.class);
49368       }
49369 
49370       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.newBuilder()
Builder()49371       private Builder() {
49372         maybeForceBuilderInitialization();
49373       }
49374 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)49375       private Builder(
49376           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
49377         super(parent);
49378         maybeForceBuilderInitialization();
49379       }
maybeForceBuilderInitialization()49380       private void maybeForceBuilderInitialization() {
49381         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
49382         }
49383       }
create()49384       private static Builder create() {
49385         return new Builder();
49386       }
49387 
clear()49388       public Builder clear() {
49389         super.clear();
49390         return this;
49391       }
49392 
clone()49393       public Builder clone() {
49394         return create().mergeFrom(buildPartial());
49395       }
49396 
49397       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()49398           getDescriptorForType() {
49399         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresRequest_descriptor;
49400       }
49401 
getDefaultInstanceForType()49402       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest getDefaultInstanceForType() {
49403         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
49404       }
49405 
build()49406       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest build() {
49407         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest result = buildPartial();
49408         if (!result.isInitialized()) {
49409           throw newUninitializedMessageException(result);
49410         }
49411         return result;
49412       }
49413 
buildPartial()49414       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest buildPartial() {
49415         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest(this);
49416         onBuilt();
49417         return result;
49418       }
49419 
mergeFrom(com.google.protobuf.Message other)49420       public Builder mergeFrom(com.google.protobuf.Message other) {
49421         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) {
49422           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)other);
49423         } else {
49424           super.mergeFrom(other);
49425           return this;
49426         }
49427       }
49428 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other)49429       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest other) {
49430         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance()) return this;
49431         this.mergeUnknownFields(other.getUnknownFields());
49432         return this;
49433       }
49434 
isInitialized()49435       public final boolean isInitialized() {
49436         return true;
49437       }
49438 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49439       public Builder mergeFrom(
49440           com.google.protobuf.CodedInputStream input,
49441           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49442           throws java.io.IOException {
49443         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest parsedMessage = null;
49444         try {
49445           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
49446         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
49447           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest) e.getUnfinishedMessage();
49448           throw e;
49449         } finally {
49450           if (parsedMessage != null) {
49451             mergeFrom(parsedMessage);
49452           }
49453         }
49454         return this;
49455       }
49456 
49457       // @@protoc_insertion_point(builder_scope:ListProceduresRequest)
49458     }
49459 
49460     static {
49461       defaultInstance = new ListProceduresRequest(true);
defaultInstance.initFields()49462       defaultInstance.initFields();
49463     }
49464 
49465     // @@protoc_insertion_point(class_scope:ListProceduresRequest)
49466   }
49467 
  // Read-only accessor interface for ListProceduresResponse: list/index/count/
  // or-builder accessors for the single repeated Procedure field.
  public interface ListProceduresResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .Procedure procedure = 1;
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>
        getProcedureList();
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index);
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    int getProcedureCount();
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
        getProcedureOrBuilderList();
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
        int index);
  }
49496   /**
49497    * Protobuf type {@code ListProceduresResponse}
49498    */
49499   public static final class ListProceduresResponse extends
49500       com.google.protobuf.GeneratedMessage
49501       implements ListProceduresResponseOrBuilder {
    // Use ListProceduresResponse.newBuilder() to construct.
    private ListProceduresResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      // Snapshot the builder's unknown fields into this immutable message.
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the static singleton; fields are set later.
    private ListProceduresResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared singleton, assigned in the class's static initializer.
    private static final ListProceduresResponse defaultInstance;
    public static ListProceduresResponse getDefaultInstance() {
      return defaultInstance;
    }

    public ListProceduresResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this message does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
ListProceduresResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49524     private ListProceduresResponse(
49525         com.google.protobuf.CodedInputStream input,
49526         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49527         throws com.google.protobuf.InvalidProtocolBufferException {
49528       initFields();
49529       int mutable_bitField0_ = 0;
49530       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
49531           com.google.protobuf.UnknownFieldSet.newBuilder();
49532       try {
49533         boolean done = false;
49534         while (!done) {
49535           int tag = input.readTag();
49536           switch (tag) {
49537             case 0:
49538               done = true;
49539               break;
49540             default: {
49541               if (!parseUnknownField(input, unknownFields,
49542                                      extensionRegistry, tag)) {
49543                 done = true;
49544               }
49545               break;
49546             }
49547             case 10: {
49548               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
49549                 procedure_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>();
49550                 mutable_bitField0_ |= 0x00000001;
49551               }
49552               procedure_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.PARSER, extensionRegistry));
49553               break;
49554             }
49555           }
49556         }
49557       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
49558         throw e.setUnfinishedMessage(this);
49559       } catch (java.io.IOException e) {
49560         throw new com.google.protobuf.InvalidProtocolBufferException(
49561             e.getMessage()).setUnfinishedMessage(this);
49562       } finally {
49563         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
49564           procedure_ = java.util.Collections.unmodifiableList(procedure_);
49565         }
49566         this.unknownFields = unknownFields.build();
49567         makeExtensionsImmutable();
49568       }
49569     }
    // Descriptor for the ListProceduresResponse message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_descriptor;
    }

    // Reflection table mapping proto fields to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class);
    }

    // Parser used by the static parseFrom helpers; delegates to the
    // wire-format constructor above.
    public static com.google.protobuf.Parser<ListProceduresResponse> PARSER =
        new com.google.protobuf.AbstractParser<ListProceduresResponse>() {
      public ListProceduresResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListProceduresResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ListProceduresResponse> getParserForType() {
      return PARSER;
    }
49596 
    // repeated .Procedure procedure = 1;
    public static final int PROCEDURE_FIELD_NUMBER = 1;
    // Backing list; unmodifiable once the message is fully constructed.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> procedure_;
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> getProcedureList() {
      return procedure_;
    }
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
        getProcedureOrBuilderList() {
      return procedure_;
    }
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    public int getProcedureCount() {
      return procedure_.size();
    }
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) {
      return procedure_.get(index);
    }
    /**
     * <code>repeated .Procedure procedure = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
        int index) {
      return procedure_.get(index);
    }

    // Reset all fields to their proto defaults (empty repeated list).
    private void initFields() {
      procedure_ = java.util.Collections.emptyList();
    }
49636     private byte memoizedIsInitialized = -1;
isInitialized()49637     public final boolean isInitialized() {
49638       byte isInitialized = memoizedIsInitialized;
49639       if (isInitialized != -1) return isInitialized == 1;
49640 
49641       for (int i = 0; i < getProcedureCount(); i++) {
49642         if (!getProcedure(i).isInitialized()) {
49643           memoizedIsInitialized = 0;
49644           return false;
49645         }
49646       }
49647       memoizedIsInitialized = 1;
49648       return true;
49649     }
49650 
writeTo(com.google.protobuf.CodedOutputStream output)49651     public void writeTo(com.google.protobuf.CodedOutputStream output)
49652                         throws java.io.IOException {
49653       getSerializedSize();
49654       for (int i = 0; i < procedure_.size(); i++) {
49655         output.writeMessage(1, procedure_.get(i));
49656       }
49657       getUnknownFields().writeTo(output);
49658     }
49659 
49660     private int memoizedSerializedSize = -1;
getSerializedSize()49661     public int getSerializedSize() {
49662       int size = memoizedSerializedSize;
49663       if (size != -1) return size;
49664 
49665       size = 0;
49666       for (int i = 0; i < procedure_.size(); i++) {
49667         size += com.google.protobuf.CodedOutputStream
49668           .computeMessageSize(1, procedure_.get(i));
49669       }
49670       size += getUnknownFields().getSerializedSize();
49671       memoizedSerializedSize = size;
49672       return size;
49673     }
49674 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; defers to GeneratedMessage's proxy handling.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
49681 
49682     @java.lang.Override
equals(final java.lang.Object obj)49683     public boolean equals(final java.lang.Object obj) {
49684       if (obj == this) {
49685        return true;
49686       }
49687       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse)) {
49688         return super.equals(obj);
49689       }
49690       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) obj;
49691 
49692       boolean result = true;
49693       result = result && getProcedureList()
49694           .equals(other.getProcedureList());
49695       result = result &&
49696           getUnknownFields().equals(other.getUnknownFields());
49697       return result;
49698     }
49699 
49700     private int memoizedHashCode = 0;
49701     @java.lang.Override
hashCode()49702     public int hashCode() {
49703       if (memoizedHashCode != 0) {
49704         return memoizedHashCode;
49705       }
49706       int hash = 41;
49707       hash = (19 * hash) + getDescriptorForType().hashCode();
49708       if (getProcedureCount() > 0) {
49709         hash = (37 * hash) + PROCEDURE_FIELD_NUMBER;
49710         hash = (53 * hash) + getProcedureList().hashCode();
49711       }
49712       hash = (29 * hash) + getUnknownFields().hashCode();
49713       memoizedHashCode = hash;
49714       return hash;
49715     }
49716 
parseFrom( com.google.protobuf.ByteString data)49717     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
49718         com.google.protobuf.ByteString data)
49719         throws com.google.protobuf.InvalidProtocolBufferException {
49720       return PARSER.parseFrom(data);
49721     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49722     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
49723         com.google.protobuf.ByteString data,
49724         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49725         throws com.google.protobuf.InvalidProtocolBufferException {
49726       return PARSER.parseFrom(data, extensionRegistry);
49727     }
parseFrom(byte[] data)49728     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(byte[] data)
49729         throws com.google.protobuf.InvalidProtocolBufferException {
49730       return PARSER.parseFrom(data);
49731     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49732     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
49733         byte[] data,
49734         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49735         throws com.google.protobuf.InvalidProtocolBufferException {
49736       return PARSER.parseFrom(data, extensionRegistry);
49737     }
parseFrom(java.io.InputStream input)49738     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(java.io.InputStream input)
49739         throws java.io.IOException {
49740       return PARSER.parseFrom(input);
49741     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49742     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
49743         java.io.InputStream input,
49744         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49745         throws java.io.IOException {
49746       return PARSER.parseFrom(input, extensionRegistry);
49747     }
parseDelimitedFrom(java.io.InputStream input)49748     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(java.io.InputStream input)
49749         throws java.io.IOException {
49750       return PARSER.parseDelimitedFrom(input);
49751     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49752     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseDelimitedFrom(
49753         java.io.InputStream input,
49754         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49755         throws java.io.IOException {
49756       return PARSER.parseDelimitedFrom(input, extensionRegistry);
49757     }
parseFrom( com.google.protobuf.CodedInputStream input)49758     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
49759         com.google.protobuf.CodedInputStream input)
49760         throws java.io.IOException {
49761       return PARSER.parseFrom(input);
49762     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49763     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parseFrom(
49764         com.google.protobuf.CodedInputStream input,
49765         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49766         throws java.io.IOException {
49767       return PARSER.parseFrom(input, extensionRegistry);
49768     }
49769 
newBuilder()49770     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()49771     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse prototype)49772     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse prototype) {
49773       return newBuilder().mergeFrom(prototype);
49774     }
toBuilder()49775     public Builder toBuilder() { return newBuilder(this); }
49776 
49777     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)49778     protected Builder newBuilderForType(
49779         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
49780       Builder builder = new Builder(parent);
49781       return builder;
49782     }
49783     /**
49784      * Protobuf type {@code ListProceduresResponse}
49785      */
49786     public static final class Builder extends
49787         com.google.protobuf.GeneratedMessage.Builder<Builder>
49788        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponseOrBuilder {
      // Descriptor for the ListProceduresResponse message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_descriptor;
      }

      // Reflection table mapping proto fields to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates the repeated-field builder when the runtime requires it.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getProcedureFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
49819 
clear()49820       public Builder clear() {
49821         super.clear();
49822         if (procedureBuilder_ == null) {
49823           procedure_ = java.util.Collections.emptyList();
49824           bitField0_ = (bitField0_ & ~0x00000001);
49825         } else {
49826           procedureBuilder_.clear();
49827         }
49828         return this;
49829       }
49830 
clone()49831       public Builder clone() {
49832         return create().mergeFrom(buildPartial());
49833       }
49834 
49835       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()49836           getDescriptorForType() {
49837         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_ListProceduresResponse_descriptor;
49838       }
49839 
getDefaultInstanceForType()49840       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse getDefaultInstanceForType() {
49841         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
49842       }
49843 
build()49844       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse build() {
49845         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse result = buildPartial();
49846         if (!result.isInitialized()) {
49847           throw newUninitializedMessageException(result);
49848         }
49849         return result;
49850       }
49851 
buildPartial()49852       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse buildPartial() {
49853         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse(this);
49854         int from_bitField0_ = bitField0_;
49855         if (procedureBuilder_ == null) {
49856           if (((bitField0_ & 0x00000001) == 0x00000001)) {
49857             procedure_ = java.util.Collections.unmodifiableList(procedure_);
49858             bitField0_ = (bitField0_ & ~0x00000001);
49859           }
49860           result.procedure_ = procedure_;
49861         } else {
49862           result.procedure_ = procedureBuilder_.build();
49863         }
49864         onBuilt();
49865         return result;
49866       }
49867 
mergeFrom(com.google.protobuf.Message other)49868       public Builder mergeFrom(com.google.protobuf.Message other) {
49869         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) {
49870           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse)other);
49871         } else {
49872           super.mergeFrom(other);
49873           return this;
49874         }
49875       }
49876 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other)49877       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse other) {
49878         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()) return this;
49879         if (procedureBuilder_ == null) {
49880           if (!other.procedure_.isEmpty()) {
49881             if (procedure_.isEmpty()) {
49882               procedure_ = other.procedure_;
49883               bitField0_ = (bitField0_ & ~0x00000001);
49884             } else {
49885               ensureProcedureIsMutable();
49886               procedure_.addAll(other.procedure_);
49887             }
49888             onChanged();
49889           }
49890         } else {
49891           if (!other.procedure_.isEmpty()) {
49892             if (procedureBuilder_.isEmpty()) {
49893               procedureBuilder_.dispose();
49894               procedureBuilder_ = null;
49895               procedure_ = other.procedure_;
49896               bitField0_ = (bitField0_ & ~0x00000001);
49897               procedureBuilder_ =
49898                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
49899                    getProcedureFieldBuilder() : null;
49900             } else {
49901               procedureBuilder_.addAllMessages(other.procedure_);
49902             }
49903           }
49904         }
49905         this.mergeUnknownFields(other.getUnknownFields());
49906         return this;
49907       }
49908 
isInitialized()49909       public final boolean isInitialized() {
49910         for (int i = 0; i < getProcedureCount(); i++) {
49911           if (!getProcedure(i).isInitialized()) {
49912 
49913             return false;
49914           }
49915         }
49916         return true;
49917       }
49918 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)49919       public Builder mergeFrom(
49920           com.google.protobuf.CodedInputStream input,
49921           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
49922           throws java.io.IOException {
49923         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse parsedMessage = null;
49924         try {
49925           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
49926         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
49927           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) e.getUnfinishedMessage();
49928           throw e;
49929         } finally {
49930           if (parsedMessage != null) {
49931             mergeFrom(parsedMessage);
49932           }
49933         }
49934         return this;
49935       }
49936       private int bitField0_;
49937 
49938       // repeated .Procedure procedure = 1;
49939       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> procedure_ =
49940         java.util.Collections.emptyList();
ensureProcedureIsMutable()49941       private void ensureProcedureIsMutable() {
49942         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
49943           procedure_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure>(procedure_);
49944           bitField0_ |= 0x00000001;
49945          }
49946       }
49947 
49948       private com.google.protobuf.RepeatedFieldBuilder<
49949           org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder> procedureBuilder_;
49950 
49951       /**
49952        * <code>repeated .Procedure procedure = 1;</code>
49953        */
getProcedureList()49954       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> getProcedureList() {
49955         if (procedureBuilder_ == null) {
49956           return java.util.Collections.unmodifiableList(procedure_);
49957         } else {
49958           return procedureBuilder_.getMessageList();
49959         }
49960       }
49961       /**
49962        * <code>repeated .Procedure procedure = 1;</code>
49963        */
getProcedureCount()49964       public int getProcedureCount() {
49965         if (procedureBuilder_ == null) {
49966           return procedure_.size();
49967         } else {
49968           return procedureBuilder_.getCount();
49969         }
49970       }
49971       /**
49972        * <code>repeated .Procedure procedure = 1;</code>
49973        */
getProcedure(int index)49974       public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure getProcedure(int index) {
49975         if (procedureBuilder_ == null) {
49976           return procedure_.get(index);
49977         } else {
49978           return procedureBuilder_.getMessage(index);
49979         }
49980       }
49981       /**
49982        * <code>repeated .Procedure procedure = 1;</code>
49983        */
setProcedure( int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value)49984       public Builder setProcedure(
49985           int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
49986         if (procedureBuilder_ == null) {
49987           if (value == null) {
49988             throw new NullPointerException();
49989           }
49990           ensureProcedureIsMutable();
49991           procedure_.set(index, value);
49992           onChanged();
49993         } else {
49994           procedureBuilder_.setMessage(index, value);
49995         }
49996         return this;
49997       }
49998       /**
49999        * <code>repeated .Procedure procedure = 1;</code>
50000        */
setProcedure( int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue)50001       public Builder setProcedure(
50002           int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
50003         if (procedureBuilder_ == null) {
50004           ensureProcedureIsMutable();
50005           procedure_.set(index, builderForValue.build());
50006           onChanged();
50007         } else {
50008           procedureBuilder_.setMessage(index, builderForValue.build());
50009         }
50010         return this;
50011       }
50012       /**
50013        * <code>repeated .Procedure procedure = 1;</code>
50014        */
addProcedure(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value)50015       public Builder addProcedure(org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
50016         if (procedureBuilder_ == null) {
50017           if (value == null) {
50018             throw new NullPointerException();
50019           }
50020           ensureProcedureIsMutable();
50021           procedure_.add(value);
50022           onChanged();
50023         } else {
50024           procedureBuilder_.addMessage(value);
50025         }
50026         return this;
50027       }
50028       /**
50029        * <code>repeated .Procedure procedure = 1;</code>
50030        */
addProcedure( int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value)50031       public Builder addProcedure(
50032           int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure value) {
50033         if (procedureBuilder_ == null) {
50034           if (value == null) {
50035             throw new NullPointerException();
50036           }
50037           ensureProcedureIsMutable();
50038           procedure_.add(index, value);
50039           onChanged();
50040         } else {
50041           procedureBuilder_.addMessage(index, value);
50042         }
50043         return this;
50044       }
50045       /**
50046        * <code>repeated .Procedure procedure = 1;</code>
50047        */
addProcedure( org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue)50048       public Builder addProcedure(
50049           org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
50050         if (procedureBuilder_ == null) {
50051           ensureProcedureIsMutable();
50052           procedure_.add(builderForValue.build());
50053           onChanged();
50054         } else {
50055           procedureBuilder_.addMessage(builderForValue.build());
50056         }
50057         return this;
50058       }
50059       /**
50060        * <code>repeated .Procedure procedure = 1;</code>
50061        */
addProcedure( int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue)50062       public Builder addProcedure(
50063           int index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder builderForValue) {
50064         if (procedureBuilder_ == null) {
50065           ensureProcedureIsMutable();
50066           procedure_.add(index, builderForValue.build());
50067           onChanged();
50068         } else {
50069           procedureBuilder_.addMessage(index, builderForValue.build());
50070         }
50071         return this;
50072       }
50073       /**
50074        * <code>repeated .Procedure procedure = 1;</code>
50075        */
addAllProcedure( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> values)50076       public Builder addAllProcedure(
50077           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure> values) {
50078         if (procedureBuilder_ == null) {
50079           ensureProcedureIsMutable();
50080           super.addAll(values, procedure_);
50081           onChanged();
50082         } else {
50083           procedureBuilder_.addAllMessages(values);
50084         }
50085         return this;
50086       }
50087       /**
50088        * <code>repeated .Procedure procedure = 1;</code>
50089        */
clearProcedure()50090       public Builder clearProcedure() {
50091         if (procedureBuilder_ == null) {
50092           procedure_ = java.util.Collections.emptyList();
50093           bitField0_ = (bitField0_ & ~0x00000001);
50094           onChanged();
50095         } else {
50096           procedureBuilder_.clear();
50097         }
50098         return this;
50099       }
50100       /**
50101        * <code>repeated .Procedure procedure = 1;</code>
50102        */
removeProcedure(int index)50103       public Builder removeProcedure(int index) {
50104         if (procedureBuilder_ == null) {
50105           ensureProcedureIsMutable();
50106           procedure_.remove(index);
50107           onChanged();
50108         } else {
50109           procedureBuilder_.remove(index);
50110         }
50111         return this;
50112       }
50113       /**
50114        * <code>repeated .Procedure procedure = 1;</code>
50115        */
getProcedureBuilder( int index)50116       public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder getProcedureBuilder(
50117           int index) {
50118         return getProcedureFieldBuilder().getBuilder(index);
50119       }
50120       /**
50121        * <code>repeated .Procedure procedure = 1;</code>
50122        */
getProcedureOrBuilder( int index)50123       public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder getProcedureOrBuilder(
50124           int index) {
50125         if (procedureBuilder_ == null) {
50126           return procedure_.get(index);  } else {
50127           return procedureBuilder_.getMessageOrBuilder(index);
50128         }
50129       }
50130       /**
50131        * <code>repeated .Procedure procedure = 1;</code>
50132        */
      // Read-only list view of all elements.  The inline list is wrapped
      // unmodifiable so callers cannot mutate builder state through it.
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
           getProcedureOrBuilderList() {
        if (procedureBuilder_ != null) {
          return procedureBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(procedure_);
        }
      }
50141       /**
50142        * <code>repeated .Procedure procedure = 1;</code>
50143        */
addProcedureBuilder()50144       public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder() {
50145         return getProcedureFieldBuilder().addBuilder(
50146             org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
50147       }
50148       /**
50149        * <code>repeated .Procedure procedure = 1;</code>
50150        */
addProcedureBuilder( int index)50151       public org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder addProcedureBuilder(
50152           int index) {
50153         return getProcedureFieldBuilder().addBuilder(
50154             index, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.getDefaultInstance());
50155       }
50156       /**
50157        * <code>repeated .Procedure procedure = 1;</code>
50158        */
      // Returns mutable Builders for every element.  Forces builder mode.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder>
           getProcedureBuilderList() {
        return getProcedureFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder on first use, migrating the
      // current inline list into it.  After this point the builder owns the
      // field's data, so the inline reference is nulled out to prevent the
      // two representations from diverging.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>
          getProcedureFieldBuilder() {
        if (procedureBuilder_ == null) {
          procedureBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.Procedure.Builder, org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.ProcedureOrBuilder>(
                  procedure_,
                  // Pass the current has-bit so the builder knows whether the
                  // list it received was ever populated.
                  ((bitField0_ & 0x00000001) == 0x00000001),
                  getParentForChildren(),
                  isClean());
          procedure_ = null;
        }
        return procedureBuilder_;
      }
50177 
50178       // @@protoc_insertion_point(builder_scope:ListProceduresResponse)
50179     }
50180 
    // Eagerly builds the shared singleton returned by getDefaultInstance().
    // The `true` argument selects the no-init constructor; initFields() then
    // installs default field values.
    static {
      defaultInstance = new ListProceduresResponse(true);
      defaultInstance.initFields();
    }
50185 
50186     // @@protoc_insertion_point(class_scope:ListProceduresResponse)
50187   }
50188 
  /**
   * Read-only accessor contract for {@code SetQuotaRequest}, implemented by
   * both the immutable message and its Builder.  All seven fields are
   * optional in the proto definition; each has a {@code hasX()} presence
   * check alongside its getter.
   */
  public interface SetQuotaRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string user_name = 1;
    /**
     * <code>optional string user_name = 1;</code>
     */
    boolean hasUserName();
    /**
     * <code>optional string user_name = 1;</code>
     */
    java.lang.String getUserName();
    /**
     * <code>optional string user_name = 1;</code>
     */
    com.google.protobuf.ByteString
        getUserNameBytes();

    // optional string user_group = 2;
    /**
     * <code>optional string user_group = 2;</code>
     */
    boolean hasUserGroup();
    /**
     * <code>optional string user_group = 2;</code>
     */
    java.lang.String getUserGroup();
    /**
     * <code>optional string user_group = 2;</code>
     */
    com.google.protobuf.ByteString
        getUserGroupBytes();

    // optional string namespace = 3;
    /**
     * <code>optional string namespace = 3;</code>
     */
    boolean hasNamespace();
    /**
     * <code>optional string namespace = 3;</code>
     */
    java.lang.String getNamespace();
    /**
     * <code>optional string namespace = 3;</code>
     */
    com.google.protobuf.ByteString
        getNamespaceBytes();

    // optional .TableName table_name = 4;
    /**
     * <code>optional .TableName table_name = 4;</code>
     */
    boolean hasTableName();
    /**
     * <code>optional .TableName table_name = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>optional .TableName table_name = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // optional bool remove_all = 5;
    /**
     * <code>optional bool remove_all = 5;</code>
     */
    boolean hasRemoveAll();
    /**
     * <code>optional bool remove_all = 5;</code>
     */
    boolean getRemoveAll();

    // optional bool bypass_globals = 6;
    /**
     * <code>optional bool bypass_globals = 6;</code>
     */
    boolean hasBypassGlobals();
    /**
     * <code>optional bool bypass_globals = 6;</code>
     */
    boolean getBypassGlobals();

    // optional .ThrottleRequest throttle = 7;
    /**
     * <code>optional .ThrottleRequest throttle = 7;</code>
     */
    boolean hasThrottle();
    /**
     * <code>optional .ThrottleRequest throttle = 7;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest getThrottle();
    /**
     * <code>optional .ThrottleRequest throttle = 7;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder getThrottleOrBuilder();
  }
50285   /**
50286    * Protobuf type {@code SetQuotaRequest}
50287    */
50288   public static final class SetQuotaRequest extends
50289       com.google.protobuf.GeneratedMessage
50290       implements SetQuotaRequestOrBuilder {
50291     // Use SetQuotaRequest.newBuilder() to construct.
SetQuotaRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)50292     private SetQuotaRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
50293       super(builder);
50294       this.unknownFields = builder.getUnknownFields();
50295     }
SetQuotaRequest(boolean noInit)50296     private SetQuotaRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
50297 
    // Shared immutable singleton with all fields at their defaults; assigned
    // once in the class's static initializer.
    private static final SetQuotaRequest defaultInstance;
    public static SetQuotaRequest getDefaultInstance() {
      return defaultInstance;
    }

    // Same singleton, exposed through the Message interface.
    public SetQuotaRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
50306 
    // Fields present on the wire but not declared in this message's schema;
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
SetQuotaRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)50313     private SetQuotaRequest(
50314         com.google.protobuf.CodedInputStream input,
50315         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
50316         throws com.google.protobuf.InvalidProtocolBufferException {
50317       initFields();
50318       int mutable_bitField0_ = 0;
50319       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
50320           com.google.protobuf.UnknownFieldSet.newBuilder();
50321       try {
50322         boolean done = false;
50323         while (!done) {
50324           int tag = input.readTag();
50325           switch (tag) {
50326             case 0:
50327               done = true;
50328               break;
50329             default: {
50330               if (!parseUnknownField(input, unknownFields,
50331                                      extensionRegistry, tag)) {
50332                 done = true;
50333               }
50334               break;
50335             }
50336             case 10: {
50337               bitField0_ |= 0x00000001;
50338               userName_ = input.readBytes();
50339               break;
50340             }
50341             case 18: {
50342               bitField0_ |= 0x00000002;
50343               userGroup_ = input.readBytes();
50344               break;
50345             }
50346             case 26: {
50347               bitField0_ |= 0x00000004;
50348               namespace_ = input.readBytes();
50349               break;
50350             }
50351             case 34: {
50352               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
50353               if (((bitField0_ & 0x00000008) == 0x00000008)) {
50354                 subBuilder = tableName_.toBuilder();
50355               }
50356               tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
50357               if (subBuilder != null) {
50358                 subBuilder.mergeFrom(tableName_);
50359                 tableName_ = subBuilder.buildPartial();
50360               }
50361               bitField0_ |= 0x00000008;
50362               break;
50363             }
50364             case 40: {
50365               bitField0_ |= 0x00000010;
50366               removeAll_ = input.readBool();
50367               break;
50368             }
50369             case 48: {
50370               bitField0_ |= 0x00000020;
50371               bypassGlobals_ = input.readBool();
50372               break;
50373             }
50374             case 58: {
50375               org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder subBuilder = null;
50376               if (((bitField0_ & 0x00000040) == 0x00000040)) {
50377                 subBuilder = throttle_.toBuilder();
50378               }
50379               throttle_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.PARSER, extensionRegistry);
50380               if (subBuilder != null) {
50381                 subBuilder.mergeFrom(throttle_);
50382                 throttle_ = subBuilder.buildPartial();
50383               }
50384               bitField0_ |= 0x00000040;
50385               break;
50386             }
50387           }
50388         }
50389       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
50390         throw e.setUnfinishedMessage(this);
50391       } catch (java.io.IOException e) {
50392         throw new com.google.protobuf.InvalidProtocolBufferException(
50393             e.getMessage()).setUnfinishedMessage(this);
50394       } finally {
50395         this.unknownFields = unknownFields.build();
50396         makeExtensionsImmutable();
50397       }
50398     }
    // Static descriptor for the SetQuotaRequest message type, looked up from
    // the file-level descriptor table in MasterProtos.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaRequest_descriptor;
    }
50403 
    // Reflection support: binds the shared accessor table to this message
    // class and its Builder on first use.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.Builder.class);
    }
50410 
    // Stateless parser delegating to the parsing constructor.
    // NOTE(review): the field is public and non-final as emitted by this
    // protoc version; do not hand-edit — regenerate from Master.proto if a
    // newer generator style is wanted.
    public static com.google.protobuf.Parser<SetQuotaRequest> PARSER =
        new com.google.protobuf.AbstractParser<SetQuotaRequest>() {
      public SetQuotaRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SetQuotaRequest(input, extensionRegistry);
      }
    };
50420 
    @java.lang.Override
    public com.google.protobuf.Parser<SetQuotaRequest> getParserForType() {
      // Message-interface hook returning the shared static parser.
      return PARSER;
    }
50425 
    // Presence bits for the seven optional fields (bit N-1 for field N).
    private int bitField0_;
    // optional string user_name = 1;
    public static final int USER_NAME_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString; decoded/encoded forms
    // are swapped in lazily by the accessors below.
    private java.lang.Object userName_;
    /**
     * <code>optional string user_name = 1;</code>
     */
    public boolean hasUserName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string user_name = 1;</code>
     */
    public java.lang.String getUserName() {
      java.lang.Object ref = userName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          // Cache the decoded String only when the bytes are valid UTF-8, so
          // malformed input is not silently canonicalized.
          userName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user_name = 1;</code>
     */
    public com.google.protobuf.ByteString
        getUserNameBytes() {
      java.lang.Object ref = userName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded ByteString for subsequent serialization calls.
        userName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
50469 
    // optional string user_group = 2;
    public static final int USER_GROUP_FIELD_NUMBER = 2;
    // String-or-ByteString holder; same lazy decode/encode protocol as
    // userName_ above.
    private java.lang.Object userGroup_;
    /**
     * <code>optional string user_group = 2;</code>
     */
    public boolean hasUserGroup() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional string user_group = 2;</code>
     */
    public java.lang.String getUserGroup() {
      java.lang.Object ref = userGroup_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          userGroup_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user_group = 2;</code>
     */
    public com.google.protobuf.ByteString
        getUserGroupBytes() {
      java.lang.Object ref = userGroup_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        userGroup_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
50512 
    // optional string namespace = 3;
    public static final int NAMESPACE_FIELD_NUMBER = 3;
    // String-or-ByteString holder; same lazy decode/encode protocol as
    // userName_ above.
    private java.lang.Object namespace_;
    /**
     * <code>optional string namespace = 3;</code>
     */
    public boolean hasNamespace() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional string namespace = 3;</code>
     */
    public java.lang.String getNamespace() {
      java.lang.Object ref = namespace_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          namespace_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string namespace = 3;</code>
     */
    public com.google.protobuf.ByteString
        getNamespaceBytes() {
      java.lang.Object ref = namespace_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        namespace_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
50555 
    // optional .TableName table_name = 4;
    public static final int TABLE_NAME_FIELD_NUMBER = 4;
    // Never null after initFields(): defaults to TableName's default
    // instance when unset.
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>optional .TableName table_name = 4;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .TableName table_name = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>optional .TableName table_name = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }
50577 
    // optional bool remove_all = 5;
    public static final int REMOVE_ALL_FIELD_NUMBER = 5;
    private boolean removeAll_;
    /**
     * <code>optional bool remove_all = 5;</code>
     */
    public boolean hasRemoveAll() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bool remove_all = 5;</code>
     */
    public boolean getRemoveAll() {
      // Returns false when unset (see initFields()).
      return removeAll_;
    }
50593 
    // optional bool bypass_globals = 6;
    public static final int BYPASS_GLOBALS_FIELD_NUMBER = 6;
    private boolean bypassGlobals_;
    /**
     * <code>optional bool bypass_globals = 6;</code>
     */
    public boolean hasBypassGlobals() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional bool bypass_globals = 6;</code>
     */
    public boolean getBypassGlobals() {
      // Returns false when unset (see initFields()).
      return bypassGlobals_;
    }
50609 
    // optional .ThrottleRequest throttle = 7;
    public static final int THROTTLE_FIELD_NUMBER = 7;
    // Never null after initFields(): defaults to ThrottleRequest's default
    // instance when unset.
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest throttle_;
    /**
     * <code>optional .ThrottleRequest throttle = 7;</code>
     */
    public boolean hasThrottle() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional .ThrottleRequest throttle = 7;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest getThrottle() {
      return throttle_;
    }
    /**
     * <code>optional .ThrottleRequest throttle = 7;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder getThrottleOrBuilder() {
      return throttle_;
    }
50631 
initFields()50632     private void initFields() {
50633       userName_ = "";
50634       userGroup_ = "";
50635       namespace_ = "";
50636       tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
50637       removeAll_ = false;
50638       bypassGlobals_ = false;
50639       throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance();
50640     }
    // Memo: -1 = not computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // All seven fields are optional, so the only initialization requirement
    // is that any submessage actually present is itself initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (hasTableName()) {
        if (!getTableName().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasThrottle()) {
        if (!getThrottle().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
50661 
writeTo(com.google.protobuf.CodedOutputStream output)50662     public void writeTo(com.google.protobuf.CodedOutputStream output)
50663                         throws java.io.IOException {
50664       getSerializedSize();
50665       if (((bitField0_ & 0x00000001) == 0x00000001)) {
50666         output.writeBytes(1, getUserNameBytes());
50667       }
50668       if (((bitField0_ & 0x00000002) == 0x00000002)) {
50669         output.writeBytes(2, getUserGroupBytes());
50670       }
50671       if (((bitField0_ & 0x00000004) == 0x00000004)) {
50672         output.writeBytes(3, getNamespaceBytes());
50673       }
50674       if (((bitField0_ & 0x00000008) == 0x00000008)) {
50675         output.writeMessage(4, tableName_);
50676       }
50677       if (((bitField0_ & 0x00000010) == 0x00000010)) {
50678         output.writeBool(5, removeAll_);
50679       }
50680       if (((bitField0_ & 0x00000020) == 0x00000020)) {
50681         output.writeBool(6, bypassGlobals_);
50682       }
50683       if (((bitField0_ & 0x00000040) == 0x00000040)) {
50684         output.writeMessage(7, throttle_);
50685       }
50686       getUnknownFields().writeTo(output);
50687     }
50688 
    // Memo: -1 = not computed; safe because the message is immutable.
    private int memoizedSerializedSize = -1;
    // Computes the exact wire size of the message (tags + payloads for each
    // present field, plus unknown fields) and caches it.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getUserNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getUserGroupBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getNamespaceBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, tableName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(5, removeAll_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(6, bypassGlobals_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(7, throttle_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
50727 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook inherited from GeneratedMessage; kept as a
    // plain delegation by the generator.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
50734 
    @java.lang.Override
    // Field-by-field structural equality: two messages are equal when they
    // agree on each field's presence and, where present, its value — and on
    // their unknown-field sets.
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest) obj;

      boolean result = true;
      result = result && (hasUserName() == other.hasUserName());
      if (hasUserName()) {
        result = result && getUserName()
            .equals(other.getUserName());
      }
      result = result && (hasUserGroup() == other.hasUserGroup());
      if (hasUserGroup()) {
        result = result && getUserGroup()
            .equals(other.getUserGroup());
      }
      result = result && (hasNamespace() == other.hasNamespace());
      if (hasNamespace()) {
        result = result && getNamespace()
            .equals(other.getNamespace());
      }
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasRemoveAll() == other.hasRemoveAll());
      if (hasRemoveAll()) {
        result = result && (getRemoveAll()
            == other.getRemoveAll());
      }
      result = result && (hasBypassGlobals() == other.hasBypassGlobals());
      if (hasBypassGlobals()) {
        result = result && (getBypassGlobals()
            == other.getBypassGlobals());
      }
      result = result && (hasThrottle() == other.hasThrottle());
      if (hasThrottle()) {
        result = result && getThrottle()
            .equals(other.getThrottle());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
50785 
    // Memo: 0 = not computed.  Safe because the message is immutable; a
    // genuine hash of 0 would merely recompute each call.
    private int memoizedHashCode = 0;
    @java.lang.Override
    // Mixes the descriptor, then each present field (field number followed by
    // value hash), then the unknown fields — consistent with equals() above.
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasUserName()) {
        hash = (37 * hash) + USER_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getUserName().hashCode();
      }
      if (hasUserGroup()) {
        hash = (37 * hash) + USER_GROUP_FIELD_NUMBER;
        hash = (53 * hash) + getUserGroup().hashCode();
      }
      if (hasNamespace()) {
        hash = (37 * hash) + NAMESPACE_FIELD_NUMBER;
        hash = (53 * hash) + getNamespace().hashCode();
      }
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasRemoveAll()) {
        hash = (37 * hash) + REMOVE_ALL_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getRemoveAll());
      }
      if (hasBypassGlobals()) {
        hash = (37 * hash) + BYPASS_GLOBALS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getBypassGlobals());
      }
      if (hasThrottle()) {
        hash = (37 * hash) + THROTTLE_FIELD_NUMBER;
        hash = (53 * hash) + getThrottle().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
50826 
parseFrom( com.google.protobuf.ByteString data)50827     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(
50828         com.google.protobuf.ByteString data)
50829         throws com.google.protobuf.InvalidProtocolBufferException {
50830       return PARSER.parseFrom(data);
50831     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)50832     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(
50833         com.google.protobuf.ByteString data,
50834         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
50835         throws com.google.protobuf.InvalidProtocolBufferException {
50836       return PARSER.parseFrom(data, extensionRegistry);
50837     }
parseFrom(byte[] data)50838     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(byte[] data)
50839         throws com.google.protobuf.InvalidProtocolBufferException {
50840       return PARSER.parseFrom(data);
50841     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)50842     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(
50843         byte[] data,
50844         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
50845         throws com.google.protobuf.InvalidProtocolBufferException {
50846       return PARSER.parseFrom(data, extensionRegistry);
50847     }
parseFrom(java.io.InputStream input)50848     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(java.io.InputStream input)
50849         throws java.io.IOException {
50850       return PARSER.parseFrom(input);
50851     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)50852     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(
50853         java.io.InputStream input,
50854         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
50855         throws java.io.IOException {
50856       return PARSER.parseFrom(input, extensionRegistry);
50857     }
parseDelimitedFrom(java.io.InputStream input)50858     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseDelimitedFrom(java.io.InputStream input)
50859         throws java.io.IOException {
50860       return PARSER.parseDelimitedFrom(input);
50861     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)50862     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseDelimitedFrom(
50863         java.io.InputStream input,
50864         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
50865         throws java.io.IOException {
50866       return PARSER.parseDelimitedFrom(input, extensionRegistry);
50867     }
parseFrom( com.google.protobuf.CodedInputStream input)50868     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(
50869         com.google.protobuf.CodedInputStream input)
50870         throws java.io.IOException {
50871       return PARSER.parseFrom(input);
50872     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)50873     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parseFrom(
50874         com.google.protobuf.CodedInputStream input,
50875         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
50876         throws java.io.IOException {
50877       return PARSER.parseFrom(input, extensionRegistry);
50878     }
50879 
    // Creates a fresh, empty Builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    // Instance hook required by the Message interface; delegates to the static factory.
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a Builder pre-populated with the given prototype's set fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Returns a Builder initialized from this message instance.
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    // Internal GeneratedMessage hook: creates a Builder attached to a parent so
    // nested-builder changes propagate invalidation upward.
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
50893     /**
50894      * Protobuf type {@code SetQuotaRequest}
50895      */
50896     public static final class Builder extends
50897         com.google.protobuf.GeneratedMessage.Builder<Builder>
50898        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequestOrBuilder {
      // Returns the protobuf descriptor for the SetQuotaRequest message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaRequest_descriptor;
      }

      // Supplies the reflection table mapping descriptor fields to the
      // generated accessors of the message and builder classes.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.Builder.class);
      }
50910 
50911       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.newBuilder()
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-attached constructor used by nested-builder plumbing.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates sub-message field builders when the runtime requests it
      // (alwaysUseFieldBuilders is true only inside the protobuf library itself).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableNameFieldBuilder();
          getThrottleFieldBuilder();
        }
      }
      // Factory used by SetQuotaRequest.newBuilder().
      private static Builder create() {
        return new Builder();
      }
50930 
      // Resets every field to its default value and clears all has-bits in bitField0_.
      public Builder clear() {
        super.clear();
        userName_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        userGroup_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        namespace_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        // Sub-message fields: reset the nested builder if one exists, else the cached message.
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        removeAll_ = false;
        bitField0_ = (bitField0_ & ~0x00000010);
        bypassGlobals_ = false;
        bitField0_ = (bitField0_ & ~0x00000020);
        if (throttleBuilder_ == null) {
          throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance();
        } else {
          throttleBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
50957 
      // Deep-copies this builder by round-tripping through a partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      // Descriptor for the message type this builder produces.
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaRequest_descriptor;
      }

      // Singleton default instance of SetQuotaRequest.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
      }

      // Builds the message, rejecting it if required sub-messages are missing
      // required fields (see isInitialized()).
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
50978 
      // Builds the message without enforcing required-field initialization.
      // Copies each field into the result and translates the builder's has-bits
      // (from_bitField0_) into the message's has-bits (to_bitField0_) one field
      // at a time; bit positions are identical on both sides here.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.userName_ = userName_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.userGroup_ = userGroup_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.namespace_ = namespace_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        // Sub-message fields come from the nested builder when one is active.
        if (tableNameBuilder_ == null) {
          result.tableName_ = tableName_;
        } else {
          result.tableName_ = tableNameBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.removeAll_ = removeAll_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.bypassGlobals_ = bypassGlobals_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        if (throttleBuilder_ == null) {
          result.throttle_ = throttle_;
        } else {
          result.throttle_ = throttleBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
51023 
      // Generic merge entry point: dispatches to the typed overload when possible,
      // otherwise falls back to reflection-based merging in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
51032 
      // Field-by-field merge: only fields set on `other` overwrite/merge into this
      // builder; scalar fields are copied, sub-messages are recursively merged.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest other) {
        // Merging the default instance is a no-op.
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance()) return this;
        if (other.hasUserName()) {
          bitField0_ |= 0x00000001;
          // Copy the raw Object (String or ByteString) to avoid forcing a UTF-8 decode.
          userName_ = other.userName_;
          onChanged();
        }
        if (other.hasUserGroup()) {
          bitField0_ |= 0x00000002;
          userGroup_ = other.userGroup_;
          onChanged();
        }
        if (other.hasNamespace()) {
          bitField0_ |= 0x00000004;
          namespace_ = other.namespace_;
          onChanged();
        }
        if (other.hasTableName()) {
          mergeTableName(other.getTableName());
        }
        if (other.hasRemoveAll()) {
          setRemoveAll(other.getRemoveAll());
        }
        if (other.hasBypassGlobals()) {
          setBypassGlobals(other.getBypassGlobals());
        }
        if (other.hasThrottle()) {
          mergeThrottle(other.getThrottle());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
51065 
      // All fields of SetQuotaRequest itself are optional, so initialization only
      // depends on the set sub-messages having their own required fields populated.
      public final boolean isInitialized() {
        if (hasTableName()) {
          if (!getTableName().isInitialized()) {

            return false;
          }
        }
        if (hasThrottle()) {
          if (!getThrottle().isInitialized()) {

            return false;
          }
        }
        return true;
      }
51081 
      // Parses a message from the wire and merges it into this builder.
      // On InvalidProtocolBufferException the partially parsed message is still
      // merged in (via the finally block) before the exception is rethrown, so
      // already-read fields are not lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for the 7 fields of this builder (one bit per field, low to high).
      private int bitField0_;
51100 
      // optional string user_name = 1;
      // Stored as Object: either a decoded String or the raw ByteString off the
      // wire; conversions are performed lazily and cached in place.
      private java.lang.Object userName_ = "";
      /**
       * <code>optional string user_name = 1;</code>
       */
      public boolean hasUserName() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string user_name = 1;</code>
       */
      public java.lang.String getUserName() {
        java.lang.Object ref = userName_;
        if (!(ref instanceof java.lang.String)) {
          // First String access: decode the cached ByteString as UTF-8 and memoize.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          userName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user_name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getUserNameBytes() {
        java.lang.Object ref = userName_;
        if (ref instanceof String) {
          // Encode and memoize the ByteString form for serialization.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          userName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user_name = 1;</code>
       */
      public Builder setUserName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        userName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user_name = 1;</code>
       */
      public Builder clearUserName() {
        bitField0_ = (bitField0_ & ~0x00000001);
        userName_ = getDefaultInstance().getUserName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string user_name = 1;</code>
       */
      public Builder setUserNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        userName_ = value;
        onChanged();
        return this;
      }
51174 
      // optional string user_group = 2;
      // Same lazy String/ByteString caching scheme as user_name.
      private java.lang.Object userGroup_ = "";
      /**
       * <code>optional string user_group = 2;</code>
       */
      public boolean hasUserGroup() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string user_group = 2;</code>
       */
      public java.lang.String getUserGroup() {
        java.lang.Object ref = userGroup_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          userGroup_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user_group = 2;</code>
       */
      public com.google.protobuf.ByteString
          getUserGroupBytes() {
        java.lang.Object ref = userGroup_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          userGroup_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user_group = 2;</code>
       */
      public Builder setUserGroup(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        userGroup_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user_group = 2;</code>
       */
      public Builder clearUserGroup() {
        bitField0_ = (bitField0_ & ~0x00000002);
        userGroup_ = getDefaultInstance().getUserGroup();
        onChanged();
        return this;
      }
      /**
       * <code>optional string user_group = 2;</code>
       */
      public Builder setUserGroupBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        userGroup_ = value;
        onChanged();
        return this;
      }
51248 
      // optional string namespace = 3;
      // Same lazy String/ByteString caching scheme as user_name.
      private java.lang.Object namespace_ = "";
      /**
       * <code>optional string namespace = 3;</code>
       */
      public boolean hasNamespace() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional string namespace = 3;</code>
       */
      public java.lang.String getNamespace() {
        java.lang.Object ref = namespace_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          namespace_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string namespace = 3;</code>
       */
      public com.google.protobuf.ByteString
          getNamespaceBytes() {
        java.lang.Object ref = namespace_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          namespace_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string namespace = 3;</code>
       */
      public Builder setNamespace(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        namespace_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string namespace = 3;</code>
       */
      public Builder clearNamespace() {
        bitField0_ = (bitField0_ & ~0x00000004);
        namespace_ = getDefaultInstance().getNamespace();
        onChanged();
        return this;
      }
      /**
       * <code>optional string namespace = 3;</code>
       */
      public Builder setNamespaceBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        namespace_ = value;
        onChanged();
        return this;
      }
51322 
      // optional .TableName table_name = 4;
      // The message value lives in tableName_ until a nested builder is requested;
      // once tableNameBuilder_ exists it owns the value and tableName_ is nulled.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
        if (tableNameBuilder_ == null) {
          return tableName_;
        } else {
          return tableNameBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          tableName_ = value;
          onChanged();
        } else {
          tableNameBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public Builder setTableName(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
        if (tableNameBuilder_ == null) {
          tableName_ = builderForValue.build();
          onChanged();
        } else {
          tableNameBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
        if (tableNameBuilder_ == null) {
          // If a non-default value is already set, field-merge into it; otherwise replace.
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
            tableName_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
          } else {
            tableName_ = value;
          }
          onChanged();
        } else {
          tableNameBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public Builder clearTableName() {
        if (tableNameBuilder_ == null) {
          tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
          onChanged();
        } else {
          tableNameBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
        // Marks the field set: callers are expected to populate the returned builder.
        bitField0_ |= 0x00000008;
        onChanged();
        return getTableNameFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
        if (tableNameBuilder_ != null) {
          return tableNameBuilder_.getMessageOrBuilder();
        } else {
          return tableName_;
        }
      }
      /**
       * <code>optional .TableName table_name = 4;</code>
       */
      // Lazily creates the SingleFieldBuilder, transferring ownership of the
      // cached message into it (tableName_ is nulled afterwards).
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
                  tableName_,
                  getParentForChildren(),
                  isClean());
          tableName_ = null;
        }
        return tableNameBuilder_;
      }
51439 
      // optional bool remove_all = 5;
      private boolean removeAll_ ;
      /**
       * <code>optional bool remove_all = 5;</code>
       */
      public boolean hasRemoveAll() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional bool remove_all = 5;</code>
       */
      public boolean getRemoveAll() {
        return removeAll_;
      }
      /**
       * <code>optional bool remove_all = 5;</code>
       */
      public Builder setRemoveAll(boolean value) {
        bitField0_ |= 0x00000010;
        removeAll_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool remove_all = 5;</code>
       */
      public Builder clearRemoveAll() {
        bitField0_ = (bitField0_ & ~0x00000010);
        removeAll_ = false;
        onChanged();
        return this;
      }
51472 
      // optional bool bypass_globals = 6;
      private boolean bypassGlobals_ ;
      /**
       * <code>optional bool bypass_globals = 6;</code>
       */
      public boolean hasBypassGlobals() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional bool bypass_globals = 6;</code>
       */
      public boolean getBypassGlobals() {
        return bypassGlobals_;
      }
      /**
       * <code>optional bool bypass_globals = 6;</code>
       */
      public Builder setBypassGlobals(boolean value) {
        bitField0_ |= 0x00000020;
        bypassGlobals_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool bypass_globals = 6;</code>
       */
      public Builder clearBypassGlobals() {
        bitField0_ = (bitField0_ & ~0x00000020);
        bypassGlobals_ = false;
        onChanged();
        return this;
      }
51505 
51506       // optional .ThrottleRequest throttle = 7;
51507       private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance();
51508       private com.google.protobuf.SingleFieldBuilder<
51509           org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder> throttleBuilder_;
51510       /**
51511        * <code>optional .ThrottleRequest throttle = 7;</code>
51512        */
hasThrottle()51513       public boolean hasThrottle() {
51514         return ((bitField0_ & 0x00000040) == 0x00000040);
51515       }
51516       /**
51517        * <code>optional .ThrottleRequest throttle = 7;</code>
51518        */
getThrottle()51519       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest getThrottle() {
51520         if (throttleBuilder_ == null) {
51521           return throttle_;
51522         } else {
51523           return throttleBuilder_.getMessage();
51524         }
51525       }
51526       /**
51527        * <code>optional .ThrottleRequest throttle = 7;</code>
51528        */
setThrottle(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest value)51529       public Builder setThrottle(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest value) {
51530         if (throttleBuilder_ == null) {
51531           if (value == null) {
51532             throw new NullPointerException();
51533           }
51534           throttle_ = value;
51535           onChanged();
51536         } else {
51537           throttleBuilder_.setMessage(value);
51538         }
51539         bitField0_ |= 0x00000040;
51540         return this;
51541       }
51542       /**
51543        * <code>optional .ThrottleRequest throttle = 7;</code>
51544        */
setThrottle( org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder builderForValue)51545       public Builder setThrottle(
51546           org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder builderForValue) {
51547         if (throttleBuilder_ == null) {
51548           throttle_ = builderForValue.build();
51549           onChanged();
51550         } else {
51551           throttleBuilder_.setMessage(builderForValue.build());
51552         }
51553         bitField0_ |= 0x00000040;
51554         return this;
51555       }
51556       /**
51557        * <code>optional .ThrottleRequest throttle = 7;</code>
51558        */
mergeThrottle(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest value)51559       public Builder mergeThrottle(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest value) {
51560         if (throttleBuilder_ == null) {
51561           if (((bitField0_ & 0x00000040) == 0x00000040) &&
51562               throttle_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance()) {
51563             throttle_ =
51564               org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.newBuilder(throttle_).mergeFrom(value).buildPartial();
51565           } else {
51566             throttle_ = value;
51567           }
51568           onChanged();
51569         } else {
51570           throttleBuilder_.mergeFrom(value);
51571         }
51572         bitField0_ |= 0x00000040;
51573         return this;
51574       }
51575       /**
51576        * <code>optional .ThrottleRequest throttle = 7;</code>
51577        */
      // Clears the `throttle` field (field 7) back to its default instance and
      // drops its has-bit.
      public Builder clearThrottle() {
        if (throttleBuilder_ == null) {
          throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance();
          onChanged();
        } else {
          throttleBuilder_.clear();
        }
        // Unset bit 7 (0x40) of the has-bits.
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
51588       /**
51589        * <code>optional .ThrottleRequest throttle = 7;</code>
51590        */
      // Returns a mutable builder for the `throttle` field, switching this
      // Builder into nested-builder mode (see getThrottleFieldBuilder()).
      // Marks the field as set, since the caller is expected to populate it.
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder getThrottleBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getThrottleFieldBuilder().getBuilder();
      }
51596       /**
51597        * <code>optional .ThrottleRequest throttle = 7;</code>
51598        */
      // Read-only view of the `throttle` field that works in both storage
      // modes without forcing a message build.
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder getThrottleOrBuilder() {
        if (throttleBuilder_ != null) {
          return throttleBuilder_.getMessageOrBuilder();
        } else {
          return throttle_;
        }
      }
51606       /**
51607        * <code>optional .ThrottleRequest throttle = 7;</code>
51608        */
      // Lazily creates the SingleFieldBuilder for `throttle`. Once created,
      // the builder owns the field's state and the plain `throttle_` reference
      // is nulled out — all accessors above branch on throttleBuilder_ != null
      // to honor this handoff.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder>
          getThrottleFieldBuilder() {
        if (throttleBuilder_ == null) {
          throttleBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder>(
                  throttle_,
                  getParentForChildren(),
                  isClean());
          throttle_ = null;
        }
        return throttleBuilder_;
      }
51622 
51623       // @@protoc_insertion_point(builder_scope:SetQuotaRequest)
51624     }
51625 
    // Creates the singleton default instance via the no-init constructor, then
    // populates its fields with their protobuf defaults.
    static {
      defaultInstance = new SetQuotaRequest(true);
      defaultInstance.initFields();
    }
51630 
51631     // @@protoc_insertion_point(class_scope:SetQuotaRequest)
51632   }
51633 
  // Accessor interface for SetQuotaResponse. The message declares no fields,
  // so the interface adds nothing beyond MessageOrBuilder.
  public interface SetQuotaResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code SetQuotaResponse}
   *
   * Empty acknowledgement message for the Master SetQuota RPC: it declares no
   * fields of its own and only preserves unknown fields read from the wire.
   */
  public static final class SetQuotaResponse extends
      com.google.protobuf.GeneratedMessage
      implements SetQuotaResponseOrBuilder {
    // Use SetQuotaResponse.newBuilder() to construct.
    private SetQuotaResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static block below to create the default instance.
    private SetQuotaResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; initialized in the static block at the
    // bottom of this class. Identity comparisons elsewhere rely on it.
    private static final SetQuotaResponse defaultInstance;
    public static SetQuotaResponse getDefaultInstance() {
      return defaultInstance;
    }

    public SetQuotaResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Since this message has no declared
    // fields, every non-zero tag is routed into the unknown-field set.
    private SetQuotaResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Capture whatever was parsed so far, even when propagating an error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.Builder.class);
    }

    // Shared parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<SetQuotaResponse> PARSER =
        new com.google.protobuf.AbstractParser<SetQuotaResponse>() {
      public SetQuotaResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SetQuotaResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SetQuotaResponse> getParserForType() {
      return PARSER;
    }

    // No fields to initialize.
    private void initFields() {
    }
    // Memoized isInitialized: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Force size computation/memoization before writing.
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Only unknown fields contribute to the serialized size.
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse) obj;

      // With no declared fields, equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- static parse helpers; all delegate to PARSER ----
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code SetQuotaResponse}
     *
     * Builder for the (field-less) SetQuotaResponse message.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested field builders to force-create for this message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SetQuotaResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse other) {
        // Identity check against the singleton default: merging the default is a no-op.
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep the partially-parsed message so it is still merged in `finally`.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:SetQuotaResponse)
    }

    static {
      defaultInstance = new SetQuotaResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:SetQuotaResponse)
  }
51971 
  // Accessor interface for MajorCompactionTimestampRequest; exposes the single
  // required table_name field.
  public interface MajorCompactionTimestampRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TableName table_name = 1;
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>required .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
  }
51989   /**
51990    * Protobuf type {@code MajorCompactionTimestampRequest}
51991    */
51992   public static final class MajorCompactionTimestampRequest extends
51993       com.google.protobuf.GeneratedMessage
51994       implements MajorCompactionTimestampRequestOrBuilder {
    // Use MajorCompactionTimestampRequest.newBuilder() to construct.
    // Copies the unknown fields accumulated on the builder.
    private MajorCompactionTimestampRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only by the static block to create the default instance.
    private MajorCompactionTimestampRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
52001 
    // Singleton default instance; initialized in the class's static block.
    private static final MajorCompactionTimestampRequest defaultInstance;
    public static MajorCompactionTimestampRequest getDefaultInstance() {
      return defaultInstance;
    }
52006 
    // Instance-level accessor for the shared default instance.
    public MajorCompactionTimestampRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
52010 
    // Unknown fields preserved verbatim from parsing; immutable once set.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Reads tags until end of input (tag 0),
    // dispatching field 1 (table_name, wire tag 10) and routing everything
    // else to the unknown-field set.
    //
    // NOTE: the `default:` label appearing before `case 10:` is how protoc
    // emits this switch; Java switch dispatch is label-based, so ordering does
    // not affect behavior.
    private MajorCompactionTimestampRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // protoc artifact: declared for repeated-field bookkeeping; unused here.
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (table_name): if already seen, merge into the prior
              // value per protobuf last-message-merges semantics.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Capture whatever was parsed so far, even when propagating an error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the MajorCompactionTimestampRequest message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampRequest_descriptor;
    }
52069 
    // Reflection support: binds the descriptor's fields to this class and its Builder.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.Builder.class);
    }
52076 
    // Shared parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<MajorCompactionTimestampRequest> PARSER =
        new com.google.protobuf.AbstractParser<MajorCompactionTimestampRequest>() {
      public MajorCompactionTimestampRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MajorCompactionTimestampRequest(input, extensionRegistry);
      }
    };
52086 
    @java.lang.Override
    public com.google.protobuf.Parser<MajorCompactionTimestampRequest> getParserForType() {
      // All instances share the single static PARSER.
      return PARSER;
    }
52091 
    // Has-bits for optional/required fields; bit 0 tracks table_name.
    private int bitField0_;
    // required .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    // Backing storage for table_name; set to the default instance by initFields().
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * Returns true iff table_name was explicitly set (has-bit 0).
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * Returns the table_name value; the TableName default instance when unset.
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>required .TableName table_name = 1;</code>
     *
     * Read-only view of table_name (messages have no builder on the immutable side).
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }
52114 
    // Resets all fields to their protobuf defaults.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
    }
    // Memoized isInitialized: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // table_name is `required`: it must be present and itself initialized.
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getTableName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
52134 
    // Serializes the message: table_name (when set) followed by unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Force size computation/memoization before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      getUnknownFields().writeTo(output);
    }
52143 
    // Memoized serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
52158 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; GeneratedMessage substitutes a serializable proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
52165 
52166     @java.lang.Override
equals(final java.lang.Object obj)52167     public boolean equals(final java.lang.Object obj) {
52168       if (obj == this) {
52169        return true;
52170       }
52171       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)) {
52172         return super.equals(obj);
52173       }
52174       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest) obj;
52175 
52176       boolean result = true;
52177       result = result && (hasTableName() == other.hasTableName());
52178       if (hasTableName()) {
52179         result = result && getTableName()
52180             .equals(other.getTableName());
52181       }
52182       result = result &&
52183           getUnknownFields().equals(other.getUnknownFields());
52184       return result;
52185     }
52186 
52187     private int memoizedHashCode = 0;
52188     @java.lang.Override
hashCode()52189     public int hashCode() {
52190       if (memoizedHashCode != 0) {
52191         return memoizedHashCode;
52192       }
52193       int hash = 41;
52194       hash = (19 * hash) + getDescriptorForType().hashCode();
52195       if (hasTableName()) {
52196         hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
52197         hash = (53 * hash) + getTableName().hashCode();
52198       }
52199       hash = (29 * hash) + getUnknownFields().hashCode();
52200       memoizedHashCode = hash;
52201       return hash;
52202     }
52203 
    // Parses a message from a ByteString via the shared PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    // Parses from a ByteString, resolving extensions against the given registry.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    // Parses a message from a raw byte array via the shared PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    // Parses from a byte array, resolving extensions against the given registry.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
parseFrom(java.io.InputStream input)52225     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(java.io.InputStream input)
52226         throws java.io.IOException {
52227       return PARSER.parseFrom(input);
52228     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52229     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(
52230         java.io.InputStream input,
52231         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52232         throws java.io.IOException {
52233       return PARSER.parseFrom(input, extensionRegistry);
52234     }
parseDelimitedFrom(java.io.InputStream input)52235     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseDelimitedFrom(java.io.InputStream input)
52236         throws java.io.IOException {
52237       return PARSER.parseDelimitedFrom(input);
52238     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52239     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseDelimitedFrom(
52240         java.io.InputStream input,
52241         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52242         throws java.io.IOException {
52243       return PARSER.parseDelimitedFrom(input, extensionRegistry);
52244     }
parseFrom( com.google.protobuf.CodedInputStream input)52245     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(
52246         com.google.protobuf.CodedInputStream input)
52247         throws java.io.IOException {
52248       return PARSER.parseFrom(input);
52249     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52250     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parseFrom(
52251         com.google.protobuf.CodedInputStream input,
52252         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52253         throws java.io.IOException {
52254       return PARSER.parseFrom(input, extensionRegistry);
52255     }
52256 
newBuilder()52257     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()52258     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest prototype)52259     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest prototype) {
52260       return newBuilder().mergeFrom(prototype);
52261     }
toBuilder()52262     public Builder toBuilder() { return newBuilder(this); }
52263 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder, used by the runtime so nested-builder changes
      // propagate to the enclosing builder.
      Builder builder = new Builder(parent);
      return builder;
    }
52270     /**
52271      * Protobuf type {@code MajorCompactionTimestampRequest}
52272      */
52273     public static final class Builder extends
52274         com.google.protobuf.GeneratedMessage.Builder<Builder>
52275        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequestOrBuilder {
52276       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()52277           getDescriptor() {
52278         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampRequest_descriptor;
52279       }
52280 
52281       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()52282           internalGetFieldAccessorTable() {
52283         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampRequest_fieldAccessorTable
52284             .ensureFieldAccessorsInitialized(
52285                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.Builder.class);
52286       }
52287 
52288       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.newBuilder()
Builder()52289       private Builder() {
52290         maybeForceBuilderInitialization();
52291       }
52292 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)52293       private Builder(
52294           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
52295         super(parent);
52296         maybeForceBuilderInitialization();
52297       }
maybeForceBuilderInitialization()52298       private void maybeForceBuilderInitialization() {
52299         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
52300           getTableNameFieldBuilder();
52301         }
52302       }
create()52303       private static Builder create() {
52304         return new Builder();
52305       }
52306 
clear()52307       public Builder clear() {
52308         super.clear();
52309         if (tableNameBuilder_ == null) {
52310           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
52311         } else {
52312           tableNameBuilder_.clear();
52313         }
52314         bitField0_ = (bitField0_ & ~0x00000001);
52315         return this;
52316       }
52317 
clone()52318       public Builder clone() {
52319         return create().mergeFrom(buildPartial());
52320       }
52321 
52322       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()52323           getDescriptorForType() {
52324         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampRequest_descriptor;
52325       }
52326 
getDefaultInstanceForType()52327       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest getDefaultInstanceForType() {
52328         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
52329       }
52330 
build()52331       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest build() {
52332         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest result = buildPartial();
52333         if (!result.isInitialized()) {
52334           throw newUninitializedMessageException(result);
52335         }
52336         return result;
52337       }
52338 
buildPartial()52339       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest buildPartial() {
52340         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest(this);
52341         int from_bitField0_ = bitField0_;
52342         int to_bitField0_ = 0;
52343         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
52344           to_bitField0_ |= 0x00000001;
52345         }
52346         if (tableNameBuilder_ == null) {
52347           result.tableName_ = tableName_;
52348         } else {
52349           result.tableName_ = tableNameBuilder_.build();
52350         }
52351         result.bitField0_ = to_bitField0_;
52352         onBuilt();
52353         return result;
52354       }
52355 
mergeFrom(com.google.protobuf.Message other)52356       public Builder mergeFrom(com.google.protobuf.Message other) {
52357         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest) {
52358           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)other);
52359         } else {
52360           super.mergeFrom(other);
52361           return this;
52362         }
52363       }
52364 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest other)52365       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest other) {
52366         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance()) return this;
52367         if (other.hasTableName()) {
52368           mergeTableName(other.getTableName());
52369         }
52370         this.mergeUnknownFields(other.getUnknownFields());
52371         return this;
52372       }
52373 
isInitialized()52374       public final boolean isInitialized() {
52375         if (!hasTableName()) {
52376 
52377           return false;
52378         }
52379         if (!getTableName().isInitialized()) {
52380 
52381           return false;
52382         }
52383         return true;
52384       }
52385 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52386       public Builder mergeFrom(
52387           com.google.protobuf.CodedInputStream input,
52388           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52389           throws java.io.IOException {
52390         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest parsedMessage = null;
52391         try {
52392           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
52393         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
52394           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest) e.getUnfinishedMessage();
52395           throw e;
52396         } finally {
52397           if (parsedMessage != null) {
52398             mergeFrom(parsedMessage);
52399           }
52400         }
52401         return this;
52402       }
52403       private int bitField0_;
52404 
52405       // required .TableName table_name = 1;
52406       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
52407       private com.google.protobuf.SingleFieldBuilder<
52408           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
52409       /**
52410        * <code>required .TableName table_name = 1;</code>
52411        */
hasTableName()52412       public boolean hasTableName() {
52413         return ((bitField0_ & 0x00000001) == 0x00000001);
52414       }
52415       /**
52416        * <code>required .TableName table_name = 1;</code>
52417        */
getTableName()52418       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
52419         if (tableNameBuilder_ == null) {
52420           return tableName_;
52421         } else {
52422           return tableNameBuilder_.getMessage();
52423         }
52424       }
52425       /**
52426        * <code>required .TableName table_name = 1;</code>
52427        */
setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)52428       public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
52429         if (tableNameBuilder_ == null) {
52430           if (value == null) {
52431             throw new NullPointerException();
52432           }
52433           tableName_ = value;
52434           onChanged();
52435         } else {
52436           tableNameBuilder_.setMessage(value);
52437         }
52438         bitField0_ |= 0x00000001;
52439         return this;
52440       }
52441       /**
52442        * <code>required .TableName table_name = 1;</code>
52443        */
setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)52444       public Builder setTableName(
52445           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
52446         if (tableNameBuilder_ == null) {
52447           tableName_ = builderForValue.build();
52448           onChanged();
52449         } else {
52450           tableNameBuilder_.setMessage(builderForValue.build());
52451         }
52452         bitField0_ |= 0x00000001;
52453         return this;
52454       }
52455       /**
52456        * <code>required .TableName table_name = 1;</code>
52457        */
mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)52458       public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
52459         if (tableNameBuilder_ == null) {
52460           if (((bitField0_ & 0x00000001) == 0x00000001) &&
52461               tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
52462             tableName_ =
52463               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
52464           } else {
52465             tableName_ = value;
52466           }
52467           onChanged();
52468         } else {
52469           tableNameBuilder_.mergeFrom(value);
52470         }
52471         bitField0_ |= 0x00000001;
52472         return this;
52473       }
52474       /**
52475        * <code>required .TableName table_name = 1;</code>
52476        */
clearTableName()52477       public Builder clearTableName() {
52478         if (tableNameBuilder_ == null) {
52479           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
52480           onChanged();
52481         } else {
52482           tableNameBuilder_.clear();
52483         }
52484         bitField0_ = (bitField0_ & ~0x00000001);
52485         return this;
52486       }
52487       /**
52488        * <code>required .TableName table_name = 1;</code>
52489        */
getTableNameBuilder()52490       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
52491         bitField0_ |= 0x00000001;
52492         onChanged();
52493         return getTableNameFieldBuilder().getBuilder();
52494       }
52495       /**
52496        * <code>required .TableName table_name = 1;</code>
52497        */
getTableNameOrBuilder()52498       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
52499         if (tableNameBuilder_ != null) {
52500           return tableNameBuilder_.getMessageOrBuilder();
52501         } else {
52502           return tableName_;
52503         }
52504       }
52505       /**
52506        * <code>required .TableName table_name = 1;</code>
52507        */
52508       private com.google.protobuf.SingleFieldBuilder<
52509           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder()52510           getTableNameFieldBuilder() {
52511         if (tableNameBuilder_ == null) {
52512           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
52513               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
52514                   tableName_,
52515                   getParentForChildren(),
52516                   isClean());
52517           tableName_ = null;
52518         }
52519         return tableNameBuilder_;
52520       }
52521 
52522       // @@protoc_insertion_point(builder_scope:MajorCompactionTimestampRequest)
52523     }
52524 
    // Class initializer: creates the shared default instance (all fields unset).
    static {
      defaultInstance = new MajorCompactionTimestampRequest(true);
      defaultInstance.initFields();
    }
52529 
52530     // @@protoc_insertion_point(class_scope:MajorCompactionTimestampRequest)
52531   }
52532 
  /**
   * Accessor interface shared by {@code MajorCompactionTimestampForRegionRequest}
   * and its Builder: presence check plus value/or-builder views of each field.
   */
  public interface MajorCompactionTimestampForRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .RegionSpecifier region = 1;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    boolean hasRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
  }
52550   /**
52551    * Protobuf type {@code MajorCompactionTimestampForRegionRequest}
52552    */
52553   public static final class MajorCompactionTimestampForRegionRequest extends
52554       com.google.protobuf.GeneratedMessage
52555       implements MajorCompactionTimestampForRegionRequestOrBuilder {
    // Use MajorCompactionTimestampForRegionRequest.newBuilder() to construct.
    private MajorCompactionTimestampForRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the shared default instance; field values are
    // assigned later by initFields() in the static initializer.
    private MajorCompactionTimestampForRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
52562 
    // Shared immutable default instance (all fields unset), created in the
    // class static initializer.
    private static final MajorCompactionTimestampForRegionRequest defaultInstance;
    public static MajorCompactionTimestampForRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public MajorCompactionTimestampForRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
52571 
    // Fields that arrived on the wire but are not declared in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
MajorCompactionTimestampForRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52578     private MajorCompactionTimestampForRegionRequest(
52579         com.google.protobuf.CodedInputStream input,
52580         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52581         throws com.google.protobuf.InvalidProtocolBufferException {
52582       initFields();
52583       int mutable_bitField0_ = 0;
52584       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
52585           com.google.protobuf.UnknownFieldSet.newBuilder();
52586       try {
52587         boolean done = false;
52588         while (!done) {
52589           int tag = input.readTag();
52590           switch (tag) {
52591             case 0:
52592               done = true;
52593               break;
52594             default: {
52595               if (!parseUnknownField(input, unknownFields,
52596                                      extensionRegistry, tag)) {
52597                 done = true;
52598               }
52599               break;
52600             }
52601             case 10: {
52602               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
52603               if (((bitField0_ & 0x00000001) == 0x00000001)) {
52604                 subBuilder = region_.toBuilder();
52605               }
52606               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
52607               if (subBuilder != null) {
52608                 subBuilder.mergeFrom(region_);
52609                 region_ = subBuilder.buildPartial();
52610               }
52611               bitField0_ |= 0x00000001;
52612               break;
52613             }
52614           }
52615         }
52616       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
52617         throw e.setUnfinishedMessage(this);
52618       } catch (java.io.IOException e) {
52619         throw new com.google.protobuf.InvalidProtocolBufferException(
52620             e.getMessage()).setUnfinishedMessage(this);
52621       } finally {
52622         this.unknownFields = unknownFields.build();
52623         makeExtensionsImmutable();
52624       }
52625     }
    // Descriptor and reflective field-accessor wiring for this message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampForRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampForRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.Builder.class);
    }
52637 
    // NOTE(review): PARSER is a non-final public static in this protoc version's
    // output; treat it as read-only — do not reassign.
    public static com.google.protobuf.Parser<MajorCompactionTimestampForRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<MajorCompactionTimestampForRegionRequest>() {
      public MajorCompactionTimestampForRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MajorCompactionTimestampForRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MajorCompactionTimestampForRegionRequest> getParserForType() {
      return PARSER;
    }
52652 
    private int bitField0_;  // presence bits: bit 0 => region
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
52657     /**
52658      * <code>required .RegionSpecifier region = 1;</code>
52659      */
hasRegion()52660     public boolean hasRegion() {
52661       return ((bitField0_ & 0x00000001) == 0x00000001);
52662     }
52663     /**
52664      * <code>required .RegionSpecifier region = 1;</code>
52665      */
getRegion()52666     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
52667       return region_;
52668     }
52669     /**
52670      * <code>required .RegionSpecifier region = 1;</code>
52671      */
getRegionOrBuilder()52672     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
52673       return region_;
52674     }
52675 
initFields()52676     private void initFields() {
52677       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
52678     }
    // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Required field must be present and itself fully initialized.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
52695 
writeTo(com.google.protobuf.CodedOutputStream output)52696     public void writeTo(com.google.protobuf.CodedOutputStream output)
52697                         throws java.io.IOException {
52698       getSerializedSize();
52699       if (((bitField0_ & 0x00000001) == 0x00000001)) {
52700         output.writeMessage(1, region_);
52701       }
52702       getUnknownFields().writeTo(output);
52703     }
52704 
    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
52719 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy mechanism.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
52726 
52727     @java.lang.Override
equals(final java.lang.Object obj)52728     public boolean equals(final java.lang.Object obj) {
52729       if (obj == this) {
52730        return true;
52731       }
52732       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)) {
52733         return super.equals(obj);
52734       }
52735       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest) obj;
52736 
52737       boolean result = true;
52738       result = result && (hasRegion() == other.hasRegion());
52739       if (hasRegion()) {
52740         result = result && getRegion()
52741             .equals(other.getRegion());
52742       }
52743       result = result &&
52744           getUnknownFields().equals(other.getUnknownFields());
52745       return result;
52746     }
52747 
    // Cached hash value; 0 means "not yet computed" (hash is recomputed at most once).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard protoc-generated mixing: seed, descriptor, then each set field
      // (field number followed by field value), then unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
52764 
parseFrom( com.google.protobuf.ByteString data)52765     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(
52766         com.google.protobuf.ByteString data)
52767         throws com.google.protobuf.InvalidProtocolBufferException {
52768       return PARSER.parseFrom(data);
52769     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52770     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(
52771         com.google.protobuf.ByteString data,
52772         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52773         throws com.google.protobuf.InvalidProtocolBufferException {
52774       return PARSER.parseFrom(data, extensionRegistry);
52775     }
parseFrom(byte[] data)52776     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(byte[] data)
52777         throws com.google.protobuf.InvalidProtocolBufferException {
52778       return PARSER.parseFrom(data);
52779     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52780     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(
52781         byte[] data,
52782         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52783         throws com.google.protobuf.InvalidProtocolBufferException {
52784       return PARSER.parseFrom(data, extensionRegistry);
52785     }
parseFrom(java.io.InputStream input)52786     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(java.io.InputStream input)
52787         throws java.io.IOException {
52788       return PARSER.parseFrom(input);
52789     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52790     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(
52791         java.io.InputStream input,
52792         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52793         throws java.io.IOException {
52794       return PARSER.parseFrom(input, extensionRegistry);
52795     }
parseDelimitedFrom(java.io.InputStream input)52796     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseDelimitedFrom(java.io.InputStream input)
52797         throws java.io.IOException {
52798       return PARSER.parseDelimitedFrom(input);
52799     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52800     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseDelimitedFrom(
52801         java.io.InputStream input,
52802         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52803         throws java.io.IOException {
52804       return PARSER.parseDelimitedFrom(input, extensionRegistry);
52805     }
parseFrom( com.google.protobuf.CodedInputStream input)52806     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(
52807         com.google.protobuf.CodedInputStream input)
52808         throws java.io.IOException {
52809       return PARSER.parseFrom(input);
52810     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)52811     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parseFrom(
52812         com.google.protobuf.CodedInputStream input,
52813         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
52814         throws java.io.IOException {
52815       return PARSER.parseFrom(input, extensionRegistry);
52816     }
52817 
newBuilder()52818     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()52819     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest prototype)52820     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest prototype) {
52821       return newBuilder().mergeFrom(prototype);
52822     }
toBuilder()52823     public Builder toBuilder() { return newBuilder(this); }
52824 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Child builder wired to {@code parent} so that changes made through
      // nested builders propagate invalidation upward to the parent builder.
      Builder builder = new Builder(parent);
      return builder;
    }
52831     /**
52832      * Protobuf type {@code MajorCompactionTimestampForRegionRequest}
52833      */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequestOrBuilder {
      // Descriptor for the MajorCompactionTimestampForRegionRequest message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampForRegionRequest_descriptor;
      }

      // Reflection support: maps field descriptors to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampForRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-field builders when the runtime requests it
      // (alwaysUseFieldBuilders is a protobuf-internal testing/consistency flag).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all fields to defaults and clears the has-bits.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      // Deep copy via round-trip through a partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampForRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
      }

      // Builds and validates; throws if the required 'region' field is unset
      // or itself uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the required-field check, copying the has-bit and the
      // 'region' value (from the nested builder when one is active).
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dynamic dispatch: use the typed merge when possible, otherwise fall
      // back to the reflective field-by-field merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Typed merge: only set fields of {@code other} overwrite/merge into this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // 'region' is a required message field, so both presence and the nested
      // message's own required fields must check out.
      public final boolean isInitialized() {
        if (!hasRegion()) {

          return false;
        }
        if (!getRegion().isInitialized()) {

          return false;
        }
        return true;
      }

      // Parses from the wire and merges into this builder; on parse failure the
      // partially-parsed message (if any) is still merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 tracks presence of 'region'.
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      // Either region_ holds the value directly, or regionBuilder_ (once
      // created) owns it; the two are never authoritative at the same time.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Merge with the existing value only if one was previously set and is
          // not the shared default instance; otherwise adopt {@code value}.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        // Marks the field present: handing out a mutable builder implies a set.
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      // Lazily creates the SingleFieldBuilder; afterwards the builder owns the
      // value and region_ is nulled so there is a single source of truth.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:MajorCompactionTimestampForRegionRequest)
    }
53085 
    static {
      // The noInit constructor skips field setup, so initFields() is called
      // explicitly to put the singleton default instance into a valid state.
      defaultInstance = new MajorCompactionTimestampForRegionRequest(true);
      defaultInstance.initFields();
    }
53090 
53091     // @@protoc_insertion_point(class_scope:MajorCompactionTimestampForRegionRequest)
53092   }
53093 
  // Read-only view shared by MajorCompactionTimestampResponse and its Builder.
  public interface MajorCompactionTimestampResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int64 compaction_timestamp = 1;
    /**
     * <code>required int64 compaction_timestamp = 1;</code>
     */
    boolean hasCompactionTimestamp();
    /**
     * <code>required int64 compaction_timestamp = 1;</code>
     */
    long getCompactionTimestamp();
  }
53107   /**
53108    * Protobuf type {@code MajorCompactionTimestampResponse}
53109    */
53110   public static final class MajorCompactionTimestampResponse extends
53111       com.google.protobuf.GeneratedMessage
53112       implements MajorCompactionTimestampResponseOrBuilder {
    // Use MajorCompactionTimestampResponse.newBuilder() to construct.
    private MajorCompactionTimestampResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only for the singleton default instance; fields are
    // initialized afterwards via initFields() in the static initializer.
    private MajorCompactionTimestampResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
53119 
    // Shared immutable default instance, created in the static initializer.
    private static final MajorCompactionTimestampResponse defaultInstance;
    public static MajorCompactionTimestampResponse getDefaultInstance() {
      return defaultInstance;
    }

    public MajorCompactionTimestampResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
53128 
    // Fields seen on the wire that this generated class does not recognize;
    // preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor, invoked by PARSER.parsePartialFrom.
    // Reads tags until end-of-stream (tag 0) or an end-group marker, storing
    // field 1 and funneling everything else into the unknown-field set.
    private MajorCompactionTimestampResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      // Emitted unconditionally by protoc; unused here (no repeated fields).
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: protoc emits the default arm before the field cases; Java
          // switch arm order is irrelevant here (every arm breaks), so the
          // behavior is identical to the conventional ordering.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Tag 8 = field 1, wire type 0 (varint): compaction_timestamp.
              bitField0_ |= 0x00000001;
              compactionTimestamp_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs on both success and failure so the partially-parsed message
        // attached to the exception is still internally consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the MajorCompactionTimestampResponse message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampResponse_descriptor;
    }

    // Reflection support: maps field descriptors to the generated accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.Builder.class);
    }
53186 
    // NOTE(review): protoc 2.5 emits PARSER as a mutable public static field
    // (not final); do not hand-edit to final here, as regeneration would
    // revert it and later protobuf versions deprecate direct PARSER access.
    public static com.google.protobuf.Parser<MajorCompactionTimestampResponse> PARSER =
        new com.google.protobuf.AbstractParser<MajorCompactionTimestampResponse>() {
      public MajorCompactionTimestampResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Delegates to the parsing constructor above.
        return new MajorCompactionTimestampResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MajorCompactionTimestampResponse> getParserForType() {
      return PARSER;
    }
53201 
    // Bit 0 tracks presence of compaction_timestamp (proto2 'required').
    private int bitField0_;
    // required int64 compaction_timestamp = 1;
    public static final int COMPACTION_TIMESTAMP_FIELD_NUMBER = 1;
    private long compactionTimestamp_;
    /**
     * <code>required int64 compaction_timestamp = 1;</code>
     */
    public boolean hasCompactionTimestamp() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int64 compaction_timestamp = 1;</code>
     */
    public long getCompactionTimestamp() {
      return compactionTimestamp_;
    }
53218 
    // Resets fields to their proto defaults; called from the parsing
    // constructor and the static initializer.
    private void initFields() {
      compactionTimestamp_ = 0L;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The only required field is compaction_timestamp.
      if (!hasCompactionTimestamp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
53234 
    // Serializes set fields plus any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of populating memoizedSerializedSize,
      // which the CodedOutputStream machinery may rely on.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt64(1, compactionTimestamp_);
      }
      getUnknownFields().writeTo(output);
    }
53243 
    // Cached serialized size; -1 means not yet computed. Safe to memoize
    // because the message is immutable once constructed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, compactionTimestamp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
53258 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage, which
    // substitutes a serialized-form proxy for this instance.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
53265 
53266     @java.lang.Override
equals(final java.lang.Object obj)53267     public boolean equals(final java.lang.Object obj) {
53268       if (obj == this) {
53269        return true;
53270       }
53271       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse)) {
53272         return super.equals(obj);
53273       }
53274       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) obj;
53275 
53276       boolean result = true;
53277       result = result && (hasCompactionTimestamp() == other.hasCompactionTimestamp());
53278       if (hasCompactionTimestamp()) {
53279         result = result && (getCompactionTimestamp()
53280             == other.getCompactionTimestamp());
53281       }
53282       result = result &&
53283           getUnknownFields().equals(other.getUnknownFields());
53284       return result;
53285     }
53286 
    // Cached hash; 0 means not yet computed (a legitimately-zero hash is
    // simply recomputed each call — an accepted protoc trade-off).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mirrors equals(): descriptor, set fields (tagged by field number),
      // then unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCompactionTimestamp()) {
        hash = (37 * hash) + COMPACTION_TIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCompactionTimestamp());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
53303 
    // Static parse entry points, all delegating to PARSER. The *Delimited*
    // variants expect a varint length prefix (writeDelimitedTo framing); the
    // registry-taking variants resolve extensions during parsing.
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
53356 
    // Standard protoc-generated builder entry points.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder pre-populated by merging in {@code prototype}'s fields.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Builder initialized from this instance's current field values.
    public Builder toBuilder() { return newBuilder(this); }
53363 
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Child builder wired to {@code parent} for nested-builder invalidation.
      Builder builder = new Builder(parent);
      return builder;
    }
53370     /**
53371      * Protobuf type {@code MajorCompactionTimestampResponse}
53372      */
53373     public static final class Builder extends
53374         com.google.protobuf.GeneratedMessage.Builder<Builder>
53375        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponseOrBuilder {
      // Descriptor for the MajorCompactionTimestampResponse message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampResponse_descriptor;
      }

      // Reflection support: maps field descriptors to the generated accessors.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.Builder.class);
      }
53387 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No message-typed fields here, so nothing to pre-create even when the
      // runtime's alwaysUseFieldBuilders flag is set.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
53405 
      // Resets the single field to its default and clears its has-bit.
      public Builder clear() {
        super.clear();
        compactionTimestamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      // Deep copy via round-trip through a partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
53416 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_MajorCompactionTimestampResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
      }
53425 
build()53426       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse build() {
53427         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse result = buildPartial();
53428         if (!result.isInitialized()) {
53429           throw newUninitializedMessageException(result);
53430         }
53431         return result;
53432       }
53433 
buildPartial()53434       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse buildPartial() {
53435         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse(this);
53436         int from_bitField0_ = bitField0_;
53437         int to_bitField0_ = 0;
53438         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
53439           to_bitField0_ |= 0x00000001;
53440         }
53441         result.compactionTimestamp_ = compactionTimestamp_;
53442         result.bitField0_ = to_bitField0_;
53443         onBuilt();
53444         return result;
53445       }
53446 
mergeFrom(com.google.protobuf.Message other)53447       public Builder mergeFrom(com.google.protobuf.Message other) {
53448         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) {
53449           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse)other);
53450         } else {
53451           super.mergeFrom(other);
53452           return this;
53453         }
53454       }
53455 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse other)53456       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse other) {
53457         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance()) return this;
53458         if (other.hasCompactionTimestamp()) {
53459           setCompactionTimestamp(other.getCompactionTimestamp());
53460         }
53461         this.mergeUnknownFields(other.getUnknownFields());
53462         return this;
53463       }
53464 
isInitialized()53465       public final boolean isInitialized() {
53466         if (!hasCompactionTimestamp()) {
53467 
53468           return false;
53469         }
53470         return true;
53471       }
53472 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)53473       public Builder mergeFrom(
53474           com.google.protobuf.CodedInputStream input,
53475           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
53476           throws java.io.IOException {
53477         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse parsedMessage = null;
53478         try {
53479           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
53480         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
53481           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) e.getUnfinishedMessage();
53482           throw e;
53483         } finally {
53484           if (parsedMessage != null) {
53485             mergeFrom(parsedMessage);
53486           }
53487         }
53488         return this;
53489       }
      // Presence bits: bit 0 tracks whether compaction_timestamp is set.
      private int bitField0_;

      // required int64 compaction_timestamp = 1;
      private long compactionTimestamp_ ;
53494       /**
53495        * <code>required int64 compaction_timestamp = 1;</code>
53496        */
hasCompactionTimestamp()53497       public boolean hasCompactionTimestamp() {
53498         return ((bitField0_ & 0x00000001) == 0x00000001);
53499       }
53500       /**
53501        * <code>required int64 compaction_timestamp = 1;</code>
53502        */
getCompactionTimestamp()53503       public long getCompactionTimestamp() {
53504         return compactionTimestamp_;
53505       }
53506       /**
53507        * <code>required int64 compaction_timestamp = 1;</code>
53508        */
setCompactionTimestamp(long value)53509       public Builder setCompactionTimestamp(long value) {
53510         bitField0_ |= 0x00000001;
53511         compactionTimestamp_ = value;
53512         onChanged();
53513         return this;
53514       }
53515       /**
53516        * <code>required int64 compaction_timestamp = 1;</code>
53517        */
clearCompactionTimestamp()53518       public Builder clearCompactionTimestamp() {
53519         bitField0_ = (bitField0_ & ~0x00000001);
53520         compactionTimestamp_ = 0L;
53521         onChanged();
53522         return this;
53523       }
53524 
53525       // @@protoc_insertion_point(builder_scope:MajorCompactionTimestampResponse)
53526     }
53527 
    static {
      // Eagerly create the shared default instance (noInit=true skips the
      // stream-parsing constructor) and set all fields to their defaults.
      defaultInstance = new MajorCompactionTimestampResponse(true);
      defaultInstance.initFields();
    }
53532 
53533     // @@protoc_insertion_point(class_scope:MajorCompactionTimestampResponse)
53534   }
53535 
  /**
   * Marker interface for {@code SecurityCapabilitiesRequest}: the message
   * declares no fields, so there are no accessors beyond those inherited
   * from {@link com.google.protobuf.MessageOrBuilder}.
   */
  public interface SecurityCapabilitiesRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
53539   /**
53540    * Protobuf type {@code SecurityCapabilitiesRequest}
53541    */
  // Immutable, fieldless request message; only unknown fields are carried.
  public static final class SecurityCapabilitiesRequest extends
      com.google.protobuf.GeneratedMessage
      implements SecurityCapabilitiesRequestOrBuilder {
    // Use SecurityCapabilitiesRequest.newBuilder() to construct.
    private SecurityCapabilitiesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path used only by the static initializer for defaultInstance.
    private SecurityCapabilitiesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final SecurityCapabilitiesRequest defaultInstance;
    public static SecurityCapabilitiesRequest getDefaultInstance() {
      return defaultInstance;
    }

    public SecurityCapabilitiesRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Tag 0 means end of input; since this
    // message declares no fields, every other tag is preserved verbatim in
    // unknownFields (built in the finally block even on parse failure).
    private SecurityCapabilitiesRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.Builder.class);
    }

    // NOTE(review): emitted by the generator as a non-final public static
    // field; do not reassign it.
    public static com.google.protobuf.Parser<SecurityCapabilitiesRequest> PARSER =
        new com.google.protobuf.AbstractParser<SecurityCapabilitiesRequest>() {
      public SecurityCapabilitiesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SecurityCapabilitiesRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SecurityCapabilitiesRequest> getParserForType() {
      return PARSER;
    }

    // No fields to initialize.
    private void initFields() {
    }
    // -1 = not yet computed; 1 = initialized. With no required fields this
    // message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      // Only unknown fields can contribute bytes for this message.
      getUnknownFields().writeTo(output);
    }

    // -1 = not yet computed; memoization is safe because the message is immutable.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) obj;

      // With no declared fields, equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 = not yet computed; safe to cache because the message is immutable.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code SecurityCapabilitiesRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No sub-message fields, so nothing to pre-create; empty body is intentional.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest other) {
        // Only unknown fields can be merged for this fieldless message.
        if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep the partially-parsed message so the finally block can merge it.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:SecurityCapabilitiesRequest)
    }

    static {
      // Eagerly create the shared default instance (noInit=true skips the
      // stream-parsing constructor).
      defaultInstance = new SecurityCapabilitiesRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:SecurityCapabilitiesRequest)
  }
53873 
  /**
   * Read-only accessors shared by {@code SecurityCapabilitiesResponse} and
   * its builder; exposes the repeated {@code capabilities} enum field.
   */
  public interface SecurityCapabilitiesResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;
    /**
     * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> getCapabilitiesList();
    /**
     * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
     */
    int getCapabilitiesCount();
    /**
     * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index);
  }
53891   /**
53892    * Protobuf type {@code SecurityCapabilitiesResponse}
53893    */
53894   public static final class SecurityCapabilitiesResponse extends
53895       com.google.protobuf.GeneratedMessage
53896       implements SecurityCapabilitiesResponseOrBuilder {
53897     // Use SecurityCapabilitiesResponse.newBuilder() to construct.
SecurityCapabilitiesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)53898     private SecurityCapabilitiesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
53899       super(builder);
53900       this.unknownFields = builder.getUnknownFields();
53901     }
SecurityCapabilitiesResponse(boolean noInit)53902     private SecurityCapabilitiesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
53903 
53904     private static final SecurityCapabilitiesResponse defaultInstance;
getDefaultInstance()53905     public static SecurityCapabilitiesResponse getDefaultInstance() {
53906       return defaultInstance;
53907     }
53908 
getDefaultInstanceForType()53909     public SecurityCapabilitiesResponse getDefaultInstanceForType() {
53910       return defaultInstance;
53911     }
53912 
53913     private final com.google.protobuf.UnknownFieldSet unknownFields;
53914     @java.lang.Override
53915     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()53916         getUnknownFields() {
53917       return this.unknownFields;
53918     }
SecurityCapabilitiesResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)53919     private SecurityCapabilitiesResponse(
53920         com.google.protobuf.CodedInputStream input,
53921         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
53922         throws com.google.protobuf.InvalidProtocolBufferException {
53923       initFields();
53924       int mutable_bitField0_ = 0;
53925       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
53926           com.google.protobuf.UnknownFieldSet.newBuilder();
53927       try {
53928         boolean done = false;
53929         while (!done) {
53930           int tag = input.readTag();
53931           switch (tag) {
53932             case 0:
53933               done = true;
53934               break;
53935             default: {
53936               if (!parseUnknownField(input, unknownFields,
53937                                      extensionRegistry, tag)) {
53938                 done = true;
53939               }
53940               break;
53941             }
53942             case 8: {
53943               int rawValue = input.readEnum();
53944               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(rawValue);
53945               if (value == null) {
53946                 unknownFields.mergeVarintField(1, rawValue);
53947               } else {
53948                 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
53949                   capabilities_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>();
53950                   mutable_bitField0_ |= 0x00000001;
53951                 }
53952                 capabilities_.add(value);
53953               }
53954               break;
53955             }
53956             case 10: {
53957               int length = input.readRawVarint32();
53958               int oldLimit = input.pushLimit(length);
53959               while(input.getBytesUntilLimit() > 0) {
53960                 int rawValue = input.readEnum();
53961                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value = org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability.valueOf(rawValue);
53962                 if (value == null) {
53963                   unknownFields.mergeVarintField(1, rawValue);
53964                 } else {
53965                   if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
53966                     capabilities_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>();
53967                     mutable_bitField0_ |= 0x00000001;
53968                   }
53969                   capabilities_.add(value);
53970                 }
53971               }
53972               input.popLimit(oldLimit);
53973               break;
53974             }
53975           }
53976         }
53977       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
53978         throw e.setUnfinishedMessage(this);
53979       } catch (java.io.IOException e) {
53980         throw new com.google.protobuf.InvalidProtocolBufferException(
53981             e.getMessage()).setUnfinishedMessage(this);
53982       } finally {
53983         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
53984           capabilities_ = java.util.Collections.unmodifiableList(capabilities_);
53985         }
53986         this.unknownFields = unknownFields.build();
53987         makeExtensionsImmutable();
53988       }
53989     }
53990     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()53991         getDescriptor() {
53992       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesResponse_descriptor;
53993     }
53994 
53995     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()53996         internalGetFieldAccessorTable() {
53997       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesResponse_fieldAccessorTable
53998           .ensureFieldAccessorsInitialized(
53999               org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Builder.class);
54000     }
54001 
54002     public static com.google.protobuf.Parser<SecurityCapabilitiesResponse> PARSER =
54003         new com.google.protobuf.AbstractParser<SecurityCapabilitiesResponse>() {
54004       public SecurityCapabilitiesResponse parsePartialFrom(
54005           com.google.protobuf.CodedInputStream input,
54006           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54007           throws com.google.protobuf.InvalidProtocolBufferException {
54008         return new SecurityCapabilitiesResponse(input, extensionRegistry);
54009       }
54010     };
54011 
54012     @java.lang.Override
getParserForType()54013     public com.google.protobuf.Parser<SecurityCapabilitiesResponse> getParserForType() {
54014       return PARSER;
54015     }
54016 
54017     /**
54018      * Protobuf enum {@code SecurityCapabilitiesResponse.Capability}
54019      */
54020     public enum Capability
54021         implements com.google.protobuf.ProtocolMessageEnum {
54022       /**
54023        * <code>SIMPLE_AUTHENTICATION = 0;</code>
54024        */
54025       SIMPLE_AUTHENTICATION(0, 0),
54026       /**
54027        * <code>SECURE_AUTHENTICATION = 1;</code>
54028        */
54029       SECURE_AUTHENTICATION(1, 1),
54030       /**
54031        * <code>AUTHORIZATION = 2;</code>
54032        */
54033       AUTHORIZATION(2, 2),
54034       /**
54035        * <code>CELL_AUTHORIZATION = 3;</code>
54036        */
54037       CELL_AUTHORIZATION(3, 3),
54038       /**
54039        * <code>CELL_VISIBILITY = 4;</code>
54040        */
54041       CELL_VISIBILITY(4, 4),
54042       ;
54043 
54044       /**
54045        * <code>SIMPLE_AUTHENTICATION = 0;</code>
54046        */
54047       public static final int SIMPLE_AUTHENTICATION_VALUE = 0;
54048       /**
54049        * <code>SECURE_AUTHENTICATION = 1;</code>
54050        */
54051       public static final int SECURE_AUTHENTICATION_VALUE = 1;
54052       /**
54053        * <code>AUTHORIZATION = 2;</code>
54054        */
54055       public static final int AUTHORIZATION_VALUE = 2;
54056       /**
54057        * <code>CELL_AUTHORIZATION = 3;</code>
54058        */
54059       public static final int CELL_AUTHORIZATION_VALUE = 3;
54060       /**
54061        * <code>CELL_VISIBILITY = 4;</code>
54062        */
54063       public static final int CELL_VISIBILITY_VALUE = 4;
54064 
54065 
getNumber()54066       public final int getNumber() { return value; }
54067 
valueOf(int value)54068       public static Capability valueOf(int value) {
54069         switch (value) {
54070           case 0: return SIMPLE_AUTHENTICATION;
54071           case 1: return SECURE_AUTHENTICATION;
54072           case 2: return AUTHORIZATION;
54073           case 3: return CELL_AUTHORIZATION;
54074           case 4: return CELL_VISIBILITY;
54075           default: return null;
54076         }
54077       }
54078 
54079       public static com.google.protobuf.Internal.EnumLiteMap<Capability>
internalGetValueMap()54080           internalGetValueMap() {
54081         return internalValueMap;
54082       }
54083       private static com.google.protobuf.Internal.EnumLiteMap<Capability>
54084           internalValueMap =
54085             new com.google.protobuf.Internal.EnumLiteMap<Capability>() {
54086               public Capability findValueByNumber(int number) {
54087                 return Capability.valueOf(number);
54088               }
54089             };
54090 
54091       public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()54092           getValueDescriptor() {
54093         return getDescriptor().getValues().get(index);
54094       }
54095       public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()54096           getDescriptorForType() {
54097         return getDescriptor();
54098       }
54099       public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()54100           getDescriptor() {
54101         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDescriptor().getEnumTypes().get(0);
54102       }
54103 
54104       private static final Capability[] VALUES = values();
54105 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)54106       public static Capability valueOf(
54107           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
54108         if (desc.getType() != getDescriptor()) {
54109           throw new java.lang.IllegalArgumentException(
54110             "EnumValueDescriptor is not for this type.");
54111         }
54112         return VALUES[desc.getIndex()];
54113       }
54114 
54115       private final int index;
54116       private final int value;
54117 
Capability(int index, int value)54118       private Capability(int index, int value) {
54119         this.index = index;
54120         this.value = value;
54121       }
54122 
54123       // @@protoc_insertion_point(enum_scope:SecurityCapabilitiesResponse.Capability)
54124     }
54125 
    // repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;
    public static final int CAPABILITIES_FIELD_NUMBER = 1;
    // Immutable once the message is built; Collections.emptyList() on the
    // default instance (see initFields()).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> capabilities_;
    /**
     * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> getCapabilitiesList() {
      return capabilities_;
    }
    /**
     * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
     */
    public int getCapabilitiesCount() {
      return capabilities_.size();
    }
    /**
     * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index) {
      return capabilities_.get(index);
    }
54147 
initFields()54148     private void initFields() {
54149       capabilities_ = java.util.Collections.emptyList();
54150     }
    // Tri-state cache: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // This message has no required fields, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
54159 
writeTo(com.google.protobuf.CodedOutputStream output)54160     public void writeTo(com.google.protobuf.CodedOutputStream output)
54161                         throws java.io.IOException {
54162       getSerializedSize();
54163       for (int i = 0; i < capabilities_.size(); i++) {
54164         output.writeEnum(1, capabilities_.get(i).getNumber());
54165       }
54166       getUnknownFields().writeTo(output);
54167     }
54168 
    // -1 means "not yet computed"; a valid size is always >= 0.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < capabilities_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeEnumSizeNoTag(capabilities_.get(i).getNumber());
        }
        size += dataSize;
        // One byte of tag overhead per element (field 1, varint wire type).
        size += 1 * capabilities_.size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
54188 
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      // Java serialization is delegated to GeneratedMessage's serialized proxy.
      return super.writeReplace();
    }
54195 
54196     @java.lang.Override
equals(final java.lang.Object obj)54197     public boolean equals(final java.lang.Object obj) {
54198       if (obj == this) {
54199        return true;
54200       }
54201       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse)) {
54202         return super.equals(obj);
54203       }
54204       org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse other = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) obj;
54205 
54206       boolean result = true;
54207       result = result && getCapabilitiesList()
54208           .equals(other.getCapabilitiesList());
54209       result = result &&
54210           getUnknownFields().equals(other.getUnknownFields());
54211       return result;
54212     }
54213 
    // 0 means "not yet computed"; a computed hash of exactly 0 is recomputed,
    // which is harmless since the result is deterministic.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getCapabilitiesCount() > 0) {
        hash = (37 * hash) + CAPABILITIES_FIELD_NUMBER;
        // hashEnumList (inherited) hashes by enum number, so the hash stays
        // stable even if enum constants are reordered in source.
        hash = (53 * hash) + hashEnumList(getCapabilitiesList());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
54230 
parseFrom( com.google.protobuf.ByteString data)54231     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
54232         com.google.protobuf.ByteString data)
54233         throws com.google.protobuf.InvalidProtocolBufferException {
54234       return PARSER.parseFrom(data);
54235     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)54236     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
54237         com.google.protobuf.ByteString data,
54238         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54239         throws com.google.protobuf.InvalidProtocolBufferException {
54240       return PARSER.parseFrom(data, extensionRegistry);
54241     }
parseFrom(byte[] data)54242     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(byte[] data)
54243         throws com.google.protobuf.InvalidProtocolBufferException {
54244       return PARSER.parseFrom(data);
54245     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)54246     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
54247         byte[] data,
54248         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54249         throws com.google.protobuf.InvalidProtocolBufferException {
54250       return PARSER.parseFrom(data, extensionRegistry);
54251     }
parseFrom(java.io.InputStream input)54252     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(java.io.InputStream input)
54253         throws java.io.IOException {
54254       return PARSER.parseFrom(input);
54255     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)54256     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
54257         java.io.InputStream input,
54258         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54259         throws java.io.IOException {
54260       return PARSER.parseFrom(input, extensionRegistry);
54261     }
parseDelimitedFrom(java.io.InputStream input)54262     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseDelimitedFrom(java.io.InputStream input)
54263         throws java.io.IOException {
54264       return PARSER.parseDelimitedFrom(input);
54265     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)54266     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseDelimitedFrom(
54267         java.io.InputStream input,
54268         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54269         throws java.io.IOException {
54270       return PARSER.parseDelimitedFrom(input, extensionRegistry);
54271     }
parseFrom( com.google.protobuf.CodedInputStream input)54272     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
54273         com.google.protobuf.CodedInputStream input)
54274         throws java.io.IOException {
54275       return PARSER.parseFrom(input);
54276     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)54277     public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parseFrom(
54278         com.google.protobuf.CodedInputStream input,
54279         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54280         throws java.io.IOException {
54281       return PARSER.parseFrom(input, extensionRegistry);
54282     }
54283 
newBuilder()54284     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()54285     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse prototype)54286     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse prototype) {
54287       return newBuilder().mergeFrom(prototype);
54288     }
toBuilder()54289     public Builder toBuilder() { return newBuilder(this); }
54290 
54291     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)54292     protected Builder newBuilderForType(
54293         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
54294       Builder builder = new Builder(parent);
54295       return builder;
54296     }
54297     /**
54298      * Protobuf type {@code SecurityCapabilitiesResponse}
54299      */
54300     public static final class Builder extends
54301         com.google.protobuf.GeneratedMessage.Builder<Builder>
54302        implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponseOrBuilder {
54303       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()54304           getDescriptor() {
54305         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesResponse_descriptor;
54306       }
54307 
54308       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()54309           internalGetFieldAccessorTable() {
54310         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesResponse_fieldAccessorTable
54311             .ensureFieldAccessorsInitialized(
54312                 org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Builder.class);
54313       }
54314 
54315       // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.newBuilder()
Builder()54316       private Builder() {
54317         maybeForceBuilderInitialization();
54318       }
54319 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)54320       private Builder(
54321           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
54322         super(parent);
54323         maybeForceBuilderInitialization();
54324       }
maybeForceBuilderInitialization()54325       private void maybeForceBuilderInitialization() {
54326         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
54327         }
54328       }
create()54329       private static Builder create() {
54330         return new Builder();
54331       }
54332 
clear()54333       public Builder clear() {
54334         super.clear();
54335         capabilities_ = java.util.Collections.emptyList();
54336         bitField0_ = (bitField0_ & ~0x00000001);
54337         return this;
54338       }
54339 
clone()54340       public Builder clone() {
54341         return create().mergeFrom(buildPartial());
54342       }
54343 
54344       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()54345           getDescriptorForType() {
54346         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_SecurityCapabilitiesResponse_descriptor;
54347       }
54348 
getDefaultInstanceForType()54349       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getDefaultInstanceForType() {
54350         return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
54351       }
54352 
build()54353       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse build() {
54354         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse result = buildPartial();
54355         if (!result.isInitialized()) {
54356           throw newUninitializedMessageException(result);
54357         }
54358         return result;
54359       }
54360 
buildPartial()54361       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse buildPartial() {
54362         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse(this);
54363         int from_bitField0_ = bitField0_;
54364         if (((bitField0_ & 0x00000001) == 0x00000001)) {
54365           capabilities_ = java.util.Collections.unmodifiableList(capabilities_);
54366           bitField0_ = (bitField0_ & ~0x00000001);
54367         }
54368         result.capabilities_ = capabilities_;
54369         onBuilt();
54370         return result;
54371       }
54372 
mergeFrom(com.google.protobuf.Message other)54373       public Builder mergeFrom(com.google.protobuf.Message other) {
54374         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) {
54375           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse)other);
54376         } else {
54377           super.mergeFrom(other);
54378           return this;
54379         }
54380       }
54381 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse other)54382       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse other) {
54383         if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance()) return this;
54384         if (!other.capabilities_.isEmpty()) {
54385           if (capabilities_.isEmpty()) {
54386             capabilities_ = other.capabilities_;
54387             bitField0_ = (bitField0_ & ~0x00000001);
54388           } else {
54389             ensureCapabilitiesIsMutable();
54390             capabilities_.addAll(other.capabilities_);
54391           }
54392           onChanged();
54393         }
54394         this.mergeUnknownFields(other.getUnknownFields());
54395         return this;
54396       }
54397 
isInitialized()54398       public final boolean isInitialized() {
54399         return true;
54400       }
54401 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)54402       public Builder mergeFrom(
54403           com.google.protobuf.CodedInputStream input,
54404           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
54405           throws java.io.IOException {
54406         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse parsedMessage = null;
54407         try {
54408           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
54409         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
54410           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) e.getUnfinishedMessage();
54411           throw e;
54412         } finally {
54413           if (parsedMessage != null) {
54414             mergeFrom(parsedMessage);
54415           }
54416         }
54417         return this;
54418       }
54419       private int bitField0_;
54420 
54421       // repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;
54422       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> capabilities_ =
54423         java.util.Collections.emptyList();
ensureCapabilitiesIsMutable()54424       private void ensureCapabilitiesIsMutable() {
54425         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
54426           capabilities_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability>(capabilities_);
54427           bitField0_ |= 0x00000001;
54428         }
54429       }
54430       /**
54431        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54432        */
getCapabilitiesList()54433       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> getCapabilitiesList() {
54434         return java.util.Collections.unmodifiableList(capabilities_);
54435       }
54436       /**
54437        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54438        */
getCapabilitiesCount()54439       public int getCapabilitiesCount() {
54440         return capabilities_.size();
54441       }
54442       /**
54443        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54444        */
getCapabilities(int index)54445       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability getCapabilities(int index) {
54446         return capabilities_.get(index);
54447       }
54448       /**
54449        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54450        */
setCapabilities( int index, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value)54451       public Builder setCapabilities(
54452           int index, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value) {
54453         if (value == null) {
54454           throw new NullPointerException();
54455         }
54456         ensureCapabilitiesIsMutable();
54457         capabilities_.set(index, value);
54458         onChanged();
54459         return this;
54460       }
54461       /**
54462        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54463        */
addCapabilities(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value)54464       public Builder addCapabilities(org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability value) {
54465         if (value == null) {
54466           throw new NullPointerException();
54467         }
54468         ensureCapabilitiesIsMutable();
54469         capabilities_.add(value);
54470         onChanged();
54471         return this;
54472       }
54473       /**
54474        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54475        */
addAllCapabilities( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> values)54476       public Builder addAllCapabilities(
54477           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.Capability> values) {
54478         ensureCapabilitiesIsMutable();
54479         super.addAll(values, capabilities_);
54480         onChanged();
54481         return this;
54482       }
54483       /**
54484        * <code>repeated .SecurityCapabilitiesResponse.Capability capabilities = 1;</code>
54485        */
clearCapabilities()54486       public Builder clearCapabilities() {
54487         capabilities_ = java.util.Collections.emptyList();
54488         bitField0_ = (bitField0_ & ~0x00000001);
54489         onChanged();
54490         return this;
54491       }
54492 
54493       // @@protoc_insertion_point(builder_scope:SecurityCapabilitiesResponse)
54494     }
54495 
    static {
      // Eagerly build the singleton returned by getDefaultInstance();
      // the boolean-arg constructor skips normal parsing setup.
      defaultInstance = new SecurityCapabilitiesResponse(true);
      defaultInstance.initFields();
    }
54500 
54501     // @@protoc_insertion_point(class_scope:SecurityCapabilitiesResponse)
54502   }
54503 
54504   /**
54505    * Protobuf service {@code MasterService}
54506    */
54507   public static abstract class MasterService
54508       implements com.google.protobuf.Service {
MasterService()54509     protected MasterService() {}
54510 
54511     public interface Interface {
54512       /**
54513        * <code>rpc GetSchemaAlterStatus(.GetSchemaAlterStatusRequest) returns (.GetSchemaAlterStatusResponse);</code>
54514        *
54515        * <pre>
54516        ** Used by the client to get the number of regions that have received the updated schema
54517        * </pre>
54518        */
getSchemaAlterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done)54519       public abstract void getSchemaAlterStatus(
54520           com.google.protobuf.RpcController controller,
54521           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request,
54522           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done);
54523 
54524       /**
54525        * <code>rpc GetTableDescriptors(.GetTableDescriptorsRequest) returns (.GetTableDescriptorsResponse);</code>
54526        *
54527        * <pre>
54528        ** Get list of TableDescriptors for requested tables.
54529        * </pre>
54530        */
getTableDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done)54531       public abstract void getTableDescriptors(
54532           com.google.protobuf.RpcController controller,
54533           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request,
54534           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done);
54535 
54536       /**
54537        * <code>rpc GetTableNames(.GetTableNamesRequest) returns (.GetTableNamesResponse);</code>
54538        *
54539        * <pre>
54540        ** Get the list of table names.
54541        * </pre>
54542        */
getTableNames( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done)54543       public abstract void getTableNames(
54544           com.google.protobuf.RpcController controller,
54545           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request,
54546           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done);
54547 
54548       /**
54549        * <code>rpc GetClusterStatus(.GetClusterStatusRequest) returns (.GetClusterStatusResponse);</code>
54550        *
54551        * <pre>
54552        ** Return cluster status.
54553        * </pre>
54554        */
getClusterStatus( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done)54555       public abstract void getClusterStatus(
54556           com.google.protobuf.RpcController controller,
54557           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request,
54558           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done);
54559 
54560       /**
54561        * <code>rpc IsMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
54562        *
54563        * <pre>
54564        ** return true if master is available
54565        * </pre>
54566        */
isMasterRunning( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done)54567       public abstract void isMasterRunning(
54568           com.google.protobuf.RpcController controller,
54569           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
54570           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done);
54571 
54572       /**
54573        * <code>rpc AddColumn(.AddColumnRequest) returns (.AddColumnResponse);</code>
54574        *
54575        * <pre>
54576        ** Adds a column to the specified table.
54577        * </pre>
54578        */
addColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done)54579       public abstract void addColumn(
54580           com.google.protobuf.RpcController controller,
54581           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request,
54582           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done);
54583 
54584       /**
54585        * <code>rpc DeleteColumn(.DeleteColumnRequest) returns (.DeleteColumnResponse);</code>
54586        *
54587        * <pre>
54588        ** Deletes a column from the specified table. Table must be disabled.
54589        * </pre>
54590        */
deleteColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done)54591       public abstract void deleteColumn(
54592           com.google.protobuf.RpcController controller,
54593           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request,
54594           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done);
54595 
54596       /**
54597        * <code>rpc ModifyColumn(.ModifyColumnRequest) returns (.ModifyColumnResponse);</code>
54598        *
54599        * <pre>
54600        ** Modifies an existing column on the specified table.
54601        * </pre>
54602        */
modifyColumn( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done)54603       public abstract void modifyColumn(
54604           com.google.protobuf.RpcController controller,
54605           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request,
54606           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done);
54607 
      /**
       * <code>rpc MoveRegion(.MoveRegionRequest) returns (.MoveRegionResponse);</code>
       *
       * <pre>
       ** Move the region region to the destination server.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the MoveRegionRequest message
       * @param done callback invoked with the MoveRegionResponse when the call completes
       */
      public abstract void moveRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done);
54619 
      /**
       * <code>rpc DispatchMergingRegions(.DispatchMergingRegionsRequest) returns (.DispatchMergingRegionsResponse);</code>
       *
       * <pre>
       ** Master dispatch merging the regions
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the DispatchMergingRegionsRequest message
       * @param done callback invoked with the DispatchMergingRegionsResponse when the call completes
       */
      public abstract void dispatchMergingRegions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done);
54631 
      /**
       * <code>rpc AssignRegion(.AssignRegionRequest) returns (.AssignRegionResponse);</code>
       *
       * <pre>
       ** Assign a region to a server chosen at random.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the AssignRegionRequest message
       * @param done callback invoked with the AssignRegionResponse when the call completes
       */
      public abstract void assignRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done);
54643 
      /**
       * <code>rpc UnassignRegion(.UnassignRegionRequest) returns (.UnassignRegionResponse);</code>
       *
       * <pre>
       **
       * Unassign a region from current hosting regionserver.  Region will then be
       * assigned to a regionserver chosen at random.  Region could be reassigned
       * back to the same server.  Use MoveRegion if you want
       * to control the region movement.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the UnassignRegionRequest message
       * @param done callback invoked with the UnassignRegionResponse when the call completes
       */
      public abstract void unassignRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done);
54659 
      /**
       * <code>rpc OfflineRegion(.OfflineRegionRequest) returns (.OfflineRegionResponse);</code>
       *
       * <pre>
       **
       * Offline a region from the assignment manager's in-memory state.  The
       * region should be in a closed state and there will be no attempt to
       * automatically reassign the region as in unassign.   This is a special
       * method, and should only be used by experts or hbck.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the OfflineRegionRequest message
       * @param done callback invoked with the OfflineRegionResponse when the call completes
       */
      public abstract void offlineRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done);
54675 
      /**
       * <code>rpc DeleteTable(.DeleteTableRequest) returns (.DeleteTableResponse);</code>
       *
       * <pre>
       ** Deletes a table
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the DeleteTableRequest message
       * @param done callback invoked with the DeleteTableResponse when the call completes
       */
      public abstract void deleteTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done);
54687 
      /**
       * <code>rpc truncateTable(.TruncateTableRequest) returns (.TruncateTableResponse);</code>
       *
       * <pre>
       ** Truncate a table
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the TruncateTableRequest message
       * @param done callback invoked with the TruncateTableResponse when the call completes
       */
      public abstract void truncateTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done);
54699 
      /**
       * <code>rpc EnableTable(.EnableTableRequest) returns (.EnableTableResponse);</code>
       *
       * <pre>
       ** Puts the table on-line (only needed if table has been previously taken offline)
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the EnableTableRequest message
       * @param done callback invoked with the EnableTableResponse when the call completes
       */
      public abstract void enableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done);
54711 
      /**
       * <code>rpc DisableTable(.DisableTableRequest) returns (.DisableTableResponse);</code>
       *
       * <pre>
       ** Take table offline
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the DisableTableRequest message
       * @param done callback invoked with the DisableTableResponse when the call completes
       */
      public abstract void disableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done);
54723 
      /**
       * <code>rpc ModifyTable(.ModifyTableRequest) returns (.ModifyTableResponse);</code>
       *
       * <pre>
       ** Modify a table's metadata
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the ModifyTableRequest message
       * @param done callback invoked with the ModifyTableResponse when the call completes
       */
      public abstract void modifyTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done);
54735 
      /**
       * <code>rpc CreateTable(.CreateTableRequest) returns (.CreateTableResponse);</code>
       *
       * <pre>
       ** Creates a new table asynchronously
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the CreateTableRequest message
       * @param done callback invoked with the CreateTableResponse when the call completes
       */
      public abstract void createTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done);
54747 
      /**
       * <code>rpc Shutdown(.ShutdownRequest) returns (.ShutdownResponse);</code>
       *
       * <pre>
       ** Shutdown an HBase cluster.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the ShutdownRequest message
       * @param done callback invoked with the ShutdownResponse when the call completes
       */
      public abstract void shutdown(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done);
54759 
      /**
       * <code>rpc StopMaster(.StopMasterRequest) returns (.StopMasterResponse);</code>
       *
       * <pre>
       ** Stop HBase Master only.  Does not shutdown the cluster.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the StopMasterRequest message
       * @param done callback invoked with the StopMasterResponse when the call completes
       */
      public abstract void stopMaster(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done);
54771 
      /**
       * <code>rpc Balance(.BalanceRequest) returns (.BalanceResponse);</code>
       *
       * <pre>
       **
       * Run the balancer.  Will run the balancer and if regions to move, it will
       * go ahead and do the reassignments.  Can NOT run for various reasons.
       * Check logs.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the BalanceRequest message
       * @param done callback invoked with the BalanceResponse when the call completes
       */
      public abstract void balance(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done);
54786 
      /**
       * <code>rpc SetBalancerRunning(.SetBalancerRunningRequest) returns (.SetBalancerRunningResponse);</code>
       *
       * <pre>
       **
       * Turn the load balancer on or off.
       * If synchronous is true, it waits until current balance() call, if outstanding, to return.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the SetBalancerRunningRequest message
       * @param done callback invoked with the SetBalancerRunningResponse when the call completes
       */
      public abstract void setBalancerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done);
54800 
      /**
       * <code>rpc IsBalancerEnabled(.IsBalancerEnabledRequest) returns (.IsBalancerEnabledResponse);</code>
       *
       * <pre>
       **
       * Query whether the Region Balancer is running.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the IsBalancerEnabledRequest message
       * @param done callback invoked with the IsBalancerEnabledResponse when the call completes
       */
      public abstract void isBalancerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done);
54813 
      /**
       * <code>rpc Normalize(.NormalizeRequest) returns (.NormalizeResponse);</code>
       *
       * <pre>
       **
       * Run region normalizer. Can NOT run for various reasons. Check logs.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the NormalizeRequest message
       * @param done callback invoked with the NormalizeResponse when the call completes
       */
      public abstract void normalize(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse> done);
54826 
      /**
       * <code>rpc SetNormalizerRunning(.SetNormalizerRunningRequest) returns (.SetNormalizerRunningResponse);</code>
       *
       * <pre>
       **
       * Turn region normalizer on or off.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the SetNormalizerRunningRequest message
       * @param done callback invoked with the SetNormalizerRunningResponse when the call completes
       */
      public abstract void setNormalizerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse> done);
54839 
      /**
       * <code>rpc IsNormalizerEnabled(.IsNormalizerEnabledRequest) returns (.IsNormalizerEnabledResponse);</code>
       *
       * <pre>
       **
       * Query whether region normalizer is enabled.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the IsNormalizerEnabledRequest message
       * @param done callback invoked with the IsNormalizerEnabledResponse when the call completes
       */
      public abstract void isNormalizerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse> done);
54852 
      /**
       * <code>rpc RunCatalogScan(.RunCatalogScanRequest) returns (.RunCatalogScanResponse);</code>
       *
       * <pre>
       ** Get a run of the catalog janitor
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the RunCatalogScanRequest message
       * @param done callback invoked with the RunCatalogScanResponse when the call completes
       */
      public abstract void runCatalogScan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done);
54864 
      /**
       * <code>rpc EnableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse);</code>
       *
       * <pre>
       **
       * Enable the catalog janitor on or off.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the EnableCatalogJanitorRequest message
       * @param done callback invoked with the EnableCatalogJanitorResponse when the call completes
       */
      public abstract void enableCatalogJanitor(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done);
54877 
      /**
       * <code>rpc IsCatalogJanitorEnabled(.IsCatalogJanitorEnabledRequest) returns (.IsCatalogJanitorEnabledResponse);</code>
       *
       * <pre>
       **
       * Query whether the catalog janitor is enabled.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the IsCatalogJanitorEnabledRequest message
       * @param done callback invoked with the IsCatalogJanitorEnabledResponse when the call completes
       */
      public abstract void isCatalogJanitorEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done);
54890 
      /**
       * <code>rpc ExecMasterService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
       *
       * <pre>
       **
       * Call a master coprocessor endpoint
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the CoprocessorServiceRequest message (note: declared in ClientProtos, not MasterProtos)
       * @param done callback invoked with the CoprocessorServiceResponse when the call completes
       */
      public abstract void execMasterService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
54903 
      /**
       * <code>rpc Snapshot(.SnapshotRequest) returns (.SnapshotResponse);</code>
       *
       * <pre>
       **
       * Create a snapshot for the given table.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the SnapshotRequest message
       * @param done callback invoked with the SnapshotResponse when the call completes
       */
      public abstract void snapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done);
54916 
      /**
       * <code>rpc GetCompletedSnapshots(.GetCompletedSnapshotsRequest) returns (.GetCompletedSnapshotsResponse);</code>
       *
       * <pre>
       **
       * Get completed snapshots.
       * Returns a list of snapshot descriptors for completed snapshots
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the GetCompletedSnapshotsRequest message
       * @param done callback invoked with the GetCompletedSnapshotsResponse when the call completes
       */
      public abstract void getCompletedSnapshots(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done);
54930 
      /**
       * <code>rpc DeleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse);</code>
       *
       * <pre>
       **
       * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the DeleteSnapshotRequest message
       * @param done callback invoked with the DeleteSnapshotResponse when the call completes
       */
      public abstract void deleteSnapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done);
54943 
      /**
       * <code>rpc IsSnapshotDone(.IsSnapshotDoneRequest) returns (.IsSnapshotDoneResponse);</code>
       *
       * <pre>
       **
       * Determine if the snapshot is done yet.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the IsSnapshotDoneRequest message
       * @param done callback invoked with the IsSnapshotDoneResponse when the call completes
       */
      public abstract void isSnapshotDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done);
54956 
      /**
       * <code>rpc RestoreSnapshot(.RestoreSnapshotRequest) returns (.RestoreSnapshotResponse);</code>
       *
       * <pre>
       **
       * Restore a snapshot
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the RestoreSnapshotRequest message
       * @param done callback invoked with the RestoreSnapshotResponse when the call completes
       */
      public abstract void restoreSnapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done);
54969 
      /**
       * <code>rpc IsRestoreSnapshotDone(.IsRestoreSnapshotDoneRequest) returns (.IsRestoreSnapshotDoneResponse);</code>
       *
       * <pre>
       **
       * Determine if the snapshot restore is done yet.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the IsRestoreSnapshotDoneRequest message
       * @param done callback invoked with the IsRestoreSnapshotDoneResponse when the call completes
       */
      public abstract void isRestoreSnapshotDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done);
54982 
      /**
       * <code>rpc ExecProcedure(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code>
       *
       * <pre>
       **
       * Execute a distributed procedure.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the ExecProcedureRequest message
       * @param done callback invoked with the ExecProcedureResponse when the call completes
       */
      public abstract void execProcedure(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done);
54995 
      /**
       * <code>rpc ExecProcedureWithRet(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code>
       *
       * <pre>
       **
       * Execute a distributed procedure with return data.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the ExecProcedureRequest message (same request type as ExecProcedure)
       * @param done callback invoked with the ExecProcedureResponse when the call completes
       */
      public abstract void execProcedureWithRet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done);
55008 
      /**
       * <code>rpc IsProcedureDone(.IsProcedureDoneRequest) returns (.IsProcedureDoneResponse);</code>
       *
       * <pre>
       **
       * Determine if the procedure is done yet.
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the IsProcedureDoneRequest message
       * @param done callback invoked with the IsProcedureDoneResponse when the call completes
       */
      public abstract void isProcedureDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done);
55021 
      /**
       * <code>rpc ModifyNamespace(.ModifyNamespaceRequest) returns (.ModifyNamespaceResponse);</code>
       *
       * <pre>
       ** Modify a namespace's metadata
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the ModifyNamespaceRequest message
       * @param done callback invoked with the ModifyNamespaceResponse when the call completes
       */
      public abstract void modifyNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done);
55033 
      /**
       * <code>rpc CreateNamespace(.CreateNamespaceRequest) returns (.CreateNamespaceResponse);</code>
       *
       * <pre>
       ** Creates a new namespace synchronously
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the CreateNamespaceRequest message
       * @param done callback invoked with the CreateNamespaceResponse when the call completes
       */
      public abstract void createNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done);
55045 
      /**
       * <code>rpc DeleteNamespace(.DeleteNamespaceRequest) returns (.DeleteNamespaceResponse);</code>
       *
       * <pre>
       ** Deletes namespace synchronously
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the DeleteNamespaceRequest message
       * @param done callback invoked with the DeleteNamespaceResponse when the call completes
       */
      public abstract void deleteNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done);
55057 
      /**
       * <code>rpc GetNamespaceDescriptor(.GetNamespaceDescriptorRequest) returns (.GetNamespaceDescriptorResponse);</code>
       *
       * <pre>
       ** Get a namespace descriptor by name
       * </pre>
       *
       * @param controller the RPC controller; implementations report failures through it
       * @param request the GetNamespaceDescriptorRequest message
       * @param done callback invoked with the GetNamespaceDescriptorResponse when the call completes
       */
      public abstract void getNamespaceDescriptor(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done);
55069 
55070       /**
55071        * <code>rpc ListNamespaceDescriptors(.ListNamespaceDescriptorsRequest) returns (.ListNamespaceDescriptorsResponse);</code>
55072        *
55073        * <pre>
55074        ** returns a list of namespaces
55075        * </pre>
55076        */
listNamespaceDescriptors( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done)55077       public abstract void listNamespaceDescriptors(
55078           com.google.protobuf.RpcController controller,
55079           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
55080           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done);
55081 
55082       /**
55083        * <code>rpc ListTableDescriptorsByNamespace(.ListTableDescriptorsByNamespaceRequest) returns (.ListTableDescriptorsByNamespaceResponse);</code>
55084        *
55085        * <pre>
55086        ** returns a list of tables for a given namespace
55087        * </pre>
55088        */
listTableDescriptorsByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done)55089       public abstract void listTableDescriptorsByNamespace(
55090           com.google.protobuf.RpcController controller,
55091           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
55092           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done);
55093 
55094       /**
55095        * <code>rpc ListTableNamesByNamespace(.ListTableNamesByNamespaceRequest) returns (.ListTableNamesByNamespaceResponse);</code>
55096        *
55097        * <pre>
55098        ** returns a list of tables for a given namespace
55099        * </pre>
55100        */
listTableNamesByNamespace( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done)55101       public abstract void listTableNamesByNamespace(
55102           com.google.protobuf.RpcController controller,
55103           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
55104           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done);
55105 
55106       /**
55107        * <code>rpc SetQuota(.SetQuotaRequest) returns (.SetQuotaResponse);</code>
55108        *
55109        * <pre>
55110        ** Apply the new quota settings
55111        * </pre>
55112        */
setQuota( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done)55113       public abstract void setQuota(
55114           com.google.protobuf.RpcController controller,
55115           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request,
55116           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done);
55117 
55118       /**
55119        * <code>rpc getLastMajorCompactionTimestamp(.MajorCompactionTimestampRequest) returns (.MajorCompactionTimestampResponse);</code>
55120        *
55121        * <pre>
55122        ** Returns the timestamp of the last major compaction
55123        * </pre>
55124        */
getLastMajorCompactionTimestamp( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done)55125       public abstract void getLastMajorCompactionTimestamp(
55126           com.google.protobuf.RpcController controller,
55127           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request,
55128           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done);
55129 
55130       /**
55131        * <code>rpc getLastMajorCompactionTimestampForRegion(.MajorCompactionTimestampForRegionRequest) returns (.MajorCompactionTimestampResponse);</code>
55132        *
55133        * <pre>
55134        ** Returns the timestamp of the last major compaction
55135        * </pre>
55136        */
getLastMajorCompactionTimestampForRegion( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done)55137       public abstract void getLastMajorCompactionTimestampForRegion(
55138           com.google.protobuf.RpcController controller,
55139           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request,
55140           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done);
55141 
55142       /**
55143        * <code>rpc getProcedureResult(.GetProcedureResultRequest) returns (.GetProcedureResultResponse);</code>
55144        */
getProcedureResult( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done)55145       public abstract void getProcedureResult(
55146           com.google.protobuf.RpcController controller,
55147           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
55148           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done);
55149 
55150       /**
55151        * <code>rpc getSecurityCapabilities(.SecurityCapabilitiesRequest) returns (.SecurityCapabilitiesResponse);</code>
55152        *
55153        * <pre>
55154        ** Returns the security capabilities in effect on the cluster
55155        * </pre>
55156        */
getSecurityCapabilities( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done)55157       public abstract void getSecurityCapabilities(
55158           com.google.protobuf.RpcController controller,
55159           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
55160           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done);
55161 
55162       /**
55163        * <code>rpc AbortProcedure(.AbortProcedureRequest) returns (.AbortProcedureResponse);</code>
55164        *
55165        * <pre>
55166        ** Abort a procedure
55167        * </pre>
55168        */
abortProcedure( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done)55169       public abstract void abortProcedure(
55170           com.google.protobuf.RpcController controller,
55171           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
55172           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done);
55173 
55174       /**
55175        * <code>rpc ListProcedures(.ListProceduresRequest) returns (.ListProceduresResponse);</code>
55176        *
55177        * <pre>
55178        ** returns a list of procedures
55179        * </pre>
55180        */
listProcedures( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done)55181       public abstract void listProcedures(
55182           com.google.protobuf.RpcController controller,
55183           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
55184           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done);
55185 
55186     }
55187 
newReflectiveService( final Interface impl)55188     public static com.google.protobuf.Service newReflectiveService(
55189         final Interface impl) {
55190       return new MasterService() {
55191         @java.lang.Override
55192         public  void getSchemaAlterStatus(
55193             com.google.protobuf.RpcController controller,
55194             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request,
55195             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done) {
55196           impl.getSchemaAlterStatus(controller, request, done);
55197         }
55198 
55199         @java.lang.Override
55200         public  void getTableDescriptors(
55201             com.google.protobuf.RpcController controller,
55202             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request,
55203             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done) {
55204           impl.getTableDescriptors(controller, request, done);
55205         }
55206 
55207         @java.lang.Override
55208         public  void getTableNames(
55209             com.google.protobuf.RpcController controller,
55210             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request,
55211             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done) {
55212           impl.getTableNames(controller, request, done);
55213         }
55214 
55215         @java.lang.Override
55216         public  void getClusterStatus(
55217             com.google.protobuf.RpcController controller,
55218             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request,
55219             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done) {
55220           impl.getClusterStatus(controller, request, done);
55221         }
55222 
55223         @java.lang.Override
55224         public  void isMasterRunning(
55225             com.google.protobuf.RpcController controller,
55226             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
55227             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done) {
55228           impl.isMasterRunning(controller, request, done);
55229         }
55230 
55231         @java.lang.Override
55232         public  void addColumn(
55233             com.google.protobuf.RpcController controller,
55234             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request,
55235             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done) {
55236           impl.addColumn(controller, request, done);
55237         }
55238 
55239         @java.lang.Override
55240         public  void deleteColumn(
55241             com.google.protobuf.RpcController controller,
55242             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request,
55243             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done) {
55244           impl.deleteColumn(controller, request, done);
55245         }
55246 
55247         @java.lang.Override
55248         public  void modifyColumn(
55249             com.google.protobuf.RpcController controller,
55250             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request,
55251             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done) {
55252           impl.modifyColumn(controller, request, done);
55253         }
55254 
55255         @java.lang.Override
55256         public  void moveRegion(
55257             com.google.protobuf.RpcController controller,
55258             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request,
55259             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done) {
55260           impl.moveRegion(controller, request, done);
55261         }
55262 
55263         @java.lang.Override
55264         public  void dispatchMergingRegions(
55265             com.google.protobuf.RpcController controller,
55266             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request,
55267             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done) {
55268           impl.dispatchMergingRegions(controller, request, done);
55269         }
55270 
55271         @java.lang.Override
55272         public  void assignRegion(
55273             com.google.protobuf.RpcController controller,
55274             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request,
55275             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done) {
55276           impl.assignRegion(controller, request, done);
55277         }
55278 
55279         @java.lang.Override
55280         public  void unassignRegion(
55281             com.google.protobuf.RpcController controller,
55282             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request,
55283             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done) {
55284           impl.unassignRegion(controller, request, done);
55285         }
55286 
55287         @java.lang.Override
55288         public  void offlineRegion(
55289             com.google.protobuf.RpcController controller,
55290             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request,
55291             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done) {
55292           impl.offlineRegion(controller, request, done);
55293         }
55294 
55295         @java.lang.Override
55296         public  void deleteTable(
55297             com.google.protobuf.RpcController controller,
55298             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request,
55299             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done) {
55300           impl.deleteTable(controller, request, done);
55301         }
55302 
55303         @java.lang.Override
55304         public  void truncateTable(
55305             com.google.protobuf.RpcController controller,
55306             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request,
55307             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done) {
55308           impl.truncateTable(controller, request, done);
55309         }
55310 
55311         @java.lang.Override
55312         public  void enableTable(
55313             com.google.protobuf.RpcController controller,
55314             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request,
55315             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done) {
55316           impl.enableTable(controller, request, done);
55317         }
55318 
55319         @java.lang.Override
55320         public  void disableTable(
55321             com.google.protobuf.RpcController controller,
55322             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request,
55323             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done) {
55324           impl.disableTable(controller, request, done);
55325         }
55326 
55327         @java.lang.Override
55328         public  void modifyTable(
55329             com.google.protobuf.RpcController controller,
55330             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request,
55331             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done) {
55332           impl.modifyTable(controller, request, done);
55333         }
55334 
55335         @java.lang.Override
55336         public  void createTable(
55337             com.google.protobuf.RpcController controller,
55338             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request,
55339             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done) {
55340           impl.createTable(controller, request, done);
55341         }
55342 
55343         @java.lang.Override
55344         public  void shutdown(
55345             com.google.protobuf.RpcController controller,
55346             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request,
55347             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done) {
55348           impl.shutdown(controller, request, done);
55349         }
55350 
55351         @java.lang.Override
55352         public  void stopMaster(
55353             com.google.protobuf.RpcController controller,
55354             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request,
55355             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done) {
55356           impl.stopMaster(controller, request, done);
55357         }
55358 
55359         @java.lang.Override
55360         public  void balance(
55361             com.google.protobuf.RpcController controller,
55362             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request,
55363             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done) {
55364           impl.balance(controller, request, done);
55365         }
55366 
55367         @java.lang.Override
55368         public  void setBalancerRunning(
55369             com.google.protobuf.RpcController controller,
55370             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request,
55371             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done) {
55372           impl.setBalancerRunning(controller, request, done);
55373         }
55374 
55375         @java.lang.Override
55376         public  void isBalancerEnabled(
55377             com.google.protobuf.RpcController controller,
55378             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request,
55379             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done) {
55380           impl.isBalancerEnabled(controller, request, done);
55381         }
55382 
55383         @java.lang.Override
55384         public  void normalize(
55385             com.google.protobuf.RpcController controller,
55386             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
55387             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse> done) {
55388           impl.normalize(controller, request, done);
55389         }
55390 
55391         @java.lang.Override
55392         public  void setNormalizerRunning(
55393             com.google.protobuf.RpcController controller,
55394             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request,
55395             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse> done) {
55396           impl.setNormalizerRunning(controller, request, done);
55397         }
55398 
55399         @java.lang.Override
55400         public  void isNormalizerEnabled(
55401             com.google.protobuf.RpcController controller,
55402             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request,
55403             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse> done) {
55404           impl.isNormalizerEnabled(controller, request, done);
55405         }
55406 
55407         @java.lang.Override
55408         public  void runCatalogScan(
55409             com.google.protobuf.RpcController controller,
55410             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
55411             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done) {
55412           impl.runCatalogScan(controller, request, done);
55413         }
55414 
55415         @java.lang.Override
55416         public  void enableCatalogJanitor(
55417             com.google.protobuf.RpcController controller,
55418             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
55419             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done) {
55420           impl.enableCatalogJanitor(controller, request, done);
55421         }
55422 
55423         @java.lang.Override
55424         public  void isCatalogJanitorEnabled(
55425             com.google.protobuf.RpcController controller,
55426             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
55427             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done) {
55428           impl.isCatalogJanitorEnabled(controller, request, done);
55429         }
55430 
55431         @java.lang.Override
55432         public  void execMasterService(
55433             com.google.protobuf.RpcController controller,
55434             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
55435             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
55436           impl.execMasterService(controller, request, done);
55437         }
55438 
55439         @java.lang.Override
55440         public  void snapshot(
55441             com.google.protobuf.RpcController controller,
55442             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request,
55443             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done) {
55444           impl.snapshot(controller, request, done);
55445         }
55446 
55447         @java.lang.Override
55448         public  void getCompletedSnapshots(
55449             com.google.protobuf.RpcController controller,
55450             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request,
55451             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done) {
55452           impl.getCompletedSnapshots(controller, request, done);
55453         }
55454 
55455         @java.lang.Override
55456         public  void deleteSnapshot(
55457             com.google.protobuf.RpcController controller,
55458             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request,
55459             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done) {
55460           impl.deleteSnapshot(controller, request, done);
55461         }
55462 
55463         @java.lang.Override
55464         public  void isSnapshotDone(
55465             com.google.protobuf.RpcController controller,
55466             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request,
55467             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done) {
55468           impl.isSnapshotDone(controller, request, done);
55469         }
55470 
55471         @java.lang.Override
55472         public  void restoreSnapshot(
55473             com.google.protobuf.RpcController controller,
55474             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request,
55475             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done) {
55476           impl.restoreSnapshot(controller, request, done);
55477         }
55478 
55479         @java.lang.Override
55480         public  void isRestoreSnapshotDone(
55481             com.google.protobuf.RpcController controller,
55482             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request,
55483             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done) {
55484           impl.isRestoreSnapshotDone(controller, request, done);
55485         }
55486 
55487         @java.lang.Override
55488         public  void execProcedure(
55489             com.google.protobuf.RpcController controller,
55490             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
55491             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
55492           impl.execProcedure(controller, request, done);
55493         }
55494 
55495         @java.lang.Override
55496         public  void execProcedureWithRet(
55497             com.google.protobuf.RpcController controller,
55498             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
55499             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
55500           impl.execProcedureWithRet(controller, request, done);
55501         }
55502 
55503         @java.lang.Override
55504         public  void isProcedureDone(
55505             com.google.protobuf.RpcController controller,
55506             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
55507             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done) {
55508           impl.isProcedureDone(controller, request, done);
55509         }
55510 
55511         @java.lang.Override
55512         public  void modifyNamespace(
55513             com.google.protobuf.RpcController controller,
55514             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
55515             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done) {
55516           impl.modifyNamespace(controller, request, done);
55517         }
55518 
55519         @java.lang.Override
55520         public  void createNamespace(
55521             com.google.protobuf.RpcController controller,
55522             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
55523             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done) {
55524           impl.createNamespace(controller, request, done);
55525         }
55526 
55527         @java.lang.Override
55528         public  void deleteNamespace(
55529             com.google.protobuf.RpcController controller,
55530             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
55531             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done) {
55532           impl.deleteNamespace(controller, request, done);
55533         }
55534 
55535         @java.lang.Override
55536         public  void getNamespaceDescriptor(
55537             com.google.protobuf.RpcController controller,
55538             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
55539             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done) {
55540           impl.getNamespaceDescriptor(controller, request, done);
55541         }
55542 
55543         @java.lang.Override
55544         public  void listNamespaceDescriptors(
55545             com.google.protobuf.RpcController controller,
55546             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
55547             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done) {
55548           impl.listNamespaceDescriptors(controller, request, done);
55549         }
55550 
55551         @java.lang.Override
55552         public  void listTableDescriptorsByNamespace(
55553             com.google.protobuf.RpcController controller,
55554             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
55555             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done) {
55556           impl.listTableDescriptorsByNamespace(controller, request, done);
55557         }
55558 
55559         @java.lang.Override
55560         public  void listTableNamesByNamespace(
55561             com.google.protobuf.RpcController controller,
55562             org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
55563             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done) {
55564           impl.listTableNamesByNamespace(controller, request, done);
55565         }
55566 
        @java.lang.Override
        public  void setQuota(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done) {
          // Forward to the wrapped Interface impl; the response is delivered
          // asynchronously via the done callback.
          impl.setQuota(controller, request, done);
        }
55574 
        @java.lang.Override
        public  void getLastMajorCompactionTimestamp(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
          // Forward to the wrapped Interface impl; the response is delivered
          // asynchronously via the done callback.
          impl.getLastMajorCompactionTimestamp(controller, request, done);
        }
55582 
        @java.lang.Override
        public  void getLastMajorCompactionTimestampForRegion(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
          // Forward to the wrapped Interface impl; note this RPC shares the
          // MajorCompactionTimestampResponse type with the non-region variant.
          impl.getLastMajorCompactionTimestampForRegion(controller, request, done);
        }
55590 
        @java.lang.Override
        public  void getProcedureResult(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done) {
          // Forward to the wrapped Interface impl; the response is delivered
          // asynchronously via the done callback.
          impl.getProcedureResult(controller, request, done);
        }
55598 
        @java.lang.Override
        public  void getSecurityCapabilities(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done) {
          // Forward to the wrapped Interface impl; the response is delivered
          // asynchronously via the done callback.
          impl.getSecurityCapabilities(controller, request, done);
        }
55606 
        @java.lang.Override
        public  void abortProcedure(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done) {
          // Forward to the wrapped Interface impl; the response is delivered
          // asynchronously via the done callback.
          impl.abortProcedure(controller, request, done);
        }
55614 
        @java.lang.Override
        public  void listProcedures(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
          // Forward to the wrapped Interface impl; the response is delivered
          // asynchronously via the done callback.
          impl.listProcedures(controller, request, done);
        }
55622 
55623       };
55624     }
55625 
    /**
     * Wraps a synchronous {@code BlockingInterface} implementation in a
     * reflective {@link com.google.protobuf.BlockingService}, so the RPC
     * layer can dispatch calls by {@code MethodDescriptor} rather than by
     * compiled-in method references.
     *
     * <p>The three switch tables below are generated from the method
     * declaration order of the Master service in Master.proto; the case
     * numbers must stay in sync with that order.
     *
     * @param impl the blocking implementation every call is forwarded to
     * @return a BlockingService that routes each method descriptor to the
     *     matching {@code impl} method and exposes request/response
     *     prototypes for message parsing
     */
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        // Dispatches on the method's index within the service descriptor and
        // forwards to the matching blocking method on impl, downcasting the
        // request message to its concrete generated type.
        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          // Reject descriptors that belong to a different service.
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)request);
            case 1:
              return impl.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)request);
            case 2:
              return impl.getTableNames(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)request);
            case 3:
              return impl.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)request);
            case 4:
              return impl.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request);
            case 5:
              return impl.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)request);
            case 6:
              return impl.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)request);
            case 7:
              return impl.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)request);
            case 8:
              return impl.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request);
            case 9:
              return impl.dispatchMergingRegions(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)request);
            case 10:
              return impl.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request);
            case 11:
              return impl.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request);
            case 12:
              return impl.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)request);
            case 13:
              return impl.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)request);
            case 14:
              return impl.truncateTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)request);
            case 15:
              return impl.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)request);
            case 16:
              return impl.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)request);
            case 17:
              return impl.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)request);
            case 18:
              return impl.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)request);
            case 19:
              return impl.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request);
            case 20:
              return impl.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request);
            case 21:
              return impl.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request);
            case 22:
              return impl.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)request);
            case 23:
              return impl.isBalancerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)request);
            case 24:
              return impl.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request);
            case 25:
              return impl.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request);
            case 26:
              return impl.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request);
            case 27:
              return impl.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request);
            case 28:
              return impl.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request);
            case 29:
              return impl.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request);
            // execMasterService uses ClientProtos coprocessor messages rather
            // than MasterProtos types.
            case 30:
              return impl.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
            case 31:
              return impl.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request);
            case 32:
              return impl.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request);
            case 33:
              return impl.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request);
            case 34:
              return impl.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request);
            case 35:
              return impl.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request);
            case 36:
              return impl.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request);
            // execProcedure and execProcedureWithRet share the same
            // request/response message types.
            case 37:
              return impl.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
            case 38:
              return impl.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request);
            case 39:
              return impl.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request);
            case 40:
              return impl.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request);
            case 41:
              return impl.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request);
            case 42:
              return impl.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request);
            case 43:
              return impl.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request);
            case 44:
              return impl.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request);
            case 45:
              return impl.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request);
            case 46:
              return impl.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request);
            case 47:
              return impl.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request);
            case 48:
              return impl.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request);
            case 49:
              return impl.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request);
            case 50:
              return impl.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request);
            case 51:
              return impl.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request);
            case 52:
              return impl.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request);
            case 53:
              return impl.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request);
            default:
              // Unreachable: the guard above ensures the method belongs to
              // this service, so its index is always one of the cases.
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Returns the default (empty) request message for the given method;
        // the RPC layer uses it as a prototype to parse incoming requests.
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance();
            case 4:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance();
            case 5:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance();
            case 6:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance();
            case 7:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance();
            case 8:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance();
            case 9:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance();
            case 10:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance();
            case 11:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance();
            case 12:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance();
            case 13:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance();
            case 14:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance();
            case 15:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance();
            case 16:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance();
            case 17:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance();
            case 18:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance();
            case 19:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance();
            case 20:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance();
            case 21:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance();
            case 22:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance();
            case 23:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
            case 24:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
            case 25:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
            case 26:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
            case 27:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
            case 28:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
            case 29:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
            case 30:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
            case 31:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
            case 32:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
            case 33:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
            case 34:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
            case 35:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
            case 36:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
            case 37:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
            case 38:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
            case 39:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
            case 40:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
            case 41:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
            case 42:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
            case 43:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
            case 44:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
            case 45:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
            case 46:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
            case 47:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
            case 48:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
            case 49:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
            case 50:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
            case 51:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
            case 52:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
            case 53:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Returns the default (empty) response message for the given method;
        // mirrors getRequestPrototype for the response side of each RPC.
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance();
            case 4:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance();
            case 5:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance();
            case 6:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance();
            case 7:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance();
            case 8:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance();
            case 9:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance();
            case 10:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance();
            case 11:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance();
            case 12:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance();
            case 13:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance();
            case 14:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance();
            case 15:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance();
            case 16:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance();
            case 17:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance();
            case 18:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance();
            case 19:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance();
            case 20:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance();
            case 21:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance();
            case 22:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance();
            case 23:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
            case 24:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
            case 25:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
            case 26:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
            case 27:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
            case 28:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
            case 29:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
            case 30:
              return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
            case 31:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
            case 32:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
            case 33:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
            case 34:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
            case 35:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
            case 36:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
            case 37:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
            case 38:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
            case 39:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
            case 40:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
            case 41:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
            case 42:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
            case 43:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
            case 44:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
            case 45:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
            case 46:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
            case 47:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
            // Cases 48 and 49 intentionally share MajorCompactionTimestampResponse:
            // both timestamp RPCs return the same message type.
            case 48:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
            case 49:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
            case 50:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
            case 51:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
            case 52:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
            case 53:
              return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
56004 
56005     /**
56006      * <code>rpc GetSchemaAlterStatus(.GetSchemaAlterStatusRequest) returns (.GetSchemaAlterStatusResponse);</code>
56007      *
56008      * <pre>
56009      ** Used by the client to get the number of regions that have received the updated schema
56010      * </pre>
56011      */
56012     public abstract void getSchemaAlterStatus(
56013         com.google.protobuf.RpcController controller,
56014         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request,
56015         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done);
56016 
56017     /**
56018      * <code>rpc GetTableDescriptors(.GetTableDescriptorsRequest) returns (.GetTableDescriptorsResponse);</code>
56019      *
56020      * <pre>
56021      ** Get list of TableDescriptors for requested tables.
56022      * </pre>
56023      */
56024     public abstract void getTableDescriptors(
56025         com.google.protobuf.RpcController controller,
56026         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request,
56027         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done);
56028 
56029     /**
56030      * <code>rpc GetTableNames(.GetTableNamesRequest) returns (.GetTableNamesResponse);</code>
56031      *
56032      * <pre>
56033      ** Get the list of table names.
56034      * </pre>
56035      */
56036     public abstract void getTableNames(
56037         com.google.protobuf.RpcController controller,
56038         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request,
56039         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done);
56040 
56041     /**
56042      * <code>rpc GetClusterStatus(.GetClusterStatusRequest) returns (.GetClusterStatusResponse);</code>
56043      *
56044      * <pre>
56045      ** Return cluster status.
56046      * </pre>
56047      */
56048     public abstract void getClusterStatus(
56049         com.google.protobuf.RpcController controller,
56050         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request,
56051         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done);
56052 
56053     /**
56054      * <code>rpc IsMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
56055      *
56056      * <pre>
56057      ** return true if master is available
56058      * </pre>
56059      */
56060     public abstract void isMasterRunning(
56061         com.google.protobuf.RpcController controller,
56062         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
56063         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done);
56064 
56065     /**
56066      * <code>rpc AddColumn(.AddColumnRequest) returns (.AddColumnResponse);</code>
56067      *
56068      * <pre>
56069      ** Adds a column to the specified table.
56070      * </pre>
56071      */
56072     public abstract void addColumn(
56073         com.google.protobuf.RpcController controller,
56074         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request,
56075         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done);
56076 
56077     /**
56078      * <code>rpc DeleteColumn(.DeleteColumnRequest) returns (.DeleteColumnResponse);</code>
56079      *
56080      * <pre>
56081      ** Deletes a column from the specified table. Table must be disabled.
56082      * </pre>
56083      */
56084     public abstract void deleteColumn(
56085         com.google.protobuf.RpcController controller,
56086         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request,
56087         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done);
56088 
56089     /**
56090      * <code>rpc ModifyColumn(.ModifyColumnRequest) returns (.ModifyColumnResponse);</code>
56091      *
56092      * <pre>
56093      ** Modifies an existing column on the specified table.
56094      * </pre>
56095      */
56096     public abstract void modifyColumn(
56097         com.google.protobuf.RpcController controller,
56098         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request,
56099         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done);
56100 
56101     /**
56102      * <code>rpc MoveRegion(.MoveRegionRequest) returns (.MoveRegionResponse);</code>
56103      *
56104      * <pre>
     ** Move the region to the destination server.
56106      * </pre>
56107      */
56108     public abstract void moveRegion(
56109         com.google.protobuf.RpcController controller,
56110         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request,
56111         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done);
56112 
56113     /**
56114      * <code>rpc DispatchMergingRegions(.DispatchMergingRegionsRequest) returns (.DispatchMergingRegionsResponse);</code>
56115      *
56116      * <pre>
56117      ** Master dispatch merging the regions
56118      * </pre>
56119      */
56120     public abstract void dispatchMergingRegions(
56121         com.google.protobuf.RpcController controller,
56122         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request,
56123         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done);
56124 
56125     /**
56126      * <code>rpc AssignRegion(.AssignRegionRequest) returns (.AssignRegionResponse);</code>
56127      *
56128      * <pre>
56129      ** Assign a region to a server chosen at random.
56130      * </pre>
56131      */
56132     public abstract void assignRegion(
56133         com.google.protobuf.RpcController controller,
56134         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request,
56135         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done);
56136 
56137     /**
56138      * <code>rpc UnassignRegion(.UnassignRegionRequest) returns (.UnassignRegionResponse);</code>
56139      *
56140      * <pre>
56141      **
56142      * Unassign a region from current hosting regionserver.  Region will then be
56143      * assigned to a regionserver chosen at random.  Region could be reassigned
56144      * back to the same server.  Use MoveRegion if you want
56145      * to control the region movement.
56146      * </pre>
56147      */
56148     public abstract void unassignRegion(
56149         com.google.protobuf.RpcController controller,
56150         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request,
56151         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done);
56152 
56153     /**
56154      * <code>rpc OfflineRegion(.OfflineRegionRequest) returns (.OfflineRegionResponse);</code>
56155      *
56156      * <pre>
56157      **
56158      * Offline a region from the assignment manager's in-memory state.  The
56159      * region should be in a closed state and there will be no attempt to
56160      * automatically reassign the region as in unassign.   This is a special
56161      * method, and should only be used by experts or hbck.
56162      * </pre>
56163      */
56164     public abstract void offlineRegion(
56165         com.google.protobuf.RpcController controller,
56166         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request,
56167         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done);
56168 
56169     /**
56170      * <code>rpc DeleteTable(.DeleteTableRequest) returns (.DeleteTableResponse);</code>
56171      *
56172      * <pre>
56173      ** Deletes a table
56174      * </pre>
56175      */
56176     public abstract void deleteTable(
56177         com.google.protobuf.RpcController controller,
56178         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request,
56179         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done);
56180 
56181     /**
56182      * <code>rpc truncateTable(.TruncateTableRequest) returns (.TruncateTableResponse);</code>
56183      *
56184      * <pre>
56185      ** Truncate a table
56186      * </pre>
56187      */
56188     public abstract void truncateTable(
56189         com.google.protobuf.RpcController controller,
56190         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request,
56191         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done);
56192 
56193     /**
56194      * <code>rpc EnableTable(.EnableTableRequest) returns (.EnableTableResponse);</code>
56195      *
56196      * <pre>
56197      ** Puts the table on-line (only needed if table has been previously taken offline)
56198      * </pre>
56199      */
56200     public abstract void enableTable(
56201         com.google.protobuf.RpcController controller,
56202         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request,
56203         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done);
56204 
56205     /**
56206      * <code>rpc DisableTable(.DisableTableRequest) returns (.DisableTableResponse);</code>
56207      *
56208      * <pre>
56209      ** Take table offline
56210      * </pre>
56211      */
56212     public abstract void disableTable(
56213         com.google.protobuf.RpcController controller,
56214         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request,
56215         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done);
56216 
56217     /**
56218      * <code>rpc ModifyTable(.ModifyTableRequest) returns (.ModifyTableResponse);</code>
56219      *
56220      * <pre>
56221      ** Modify a table's metadata
56222      * </pre>
56223      */
56224     public abstract void modifyTable(
56225         com.google.protobuf.RpcController controller,
56226         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request,
56227         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done);
56228 
56229     /**
56230      * <code>rpc CreateTable(.CreateTableRequest) returns (.CreateTableResponse);</code>
56231      *
56232      * <pre>
56233      ** Creates a new table asynchronously
56234      * </pre>
56235      */
56236     public abstract void createTable(
56237         com.google.protobuf.RpcController controller,
56238         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request,
56239         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done);
56240 
56241     /**
56242      * <code>rpc Shutdown(.ShutdownRequest) returns (.ShutdownResponse);</code>
56243      *
56244      * <pre>
56245      ** Shutdown an HBase cluster.
56246      * </pre>
56247      */
56248     public abstract void shutdown(
56249         com.google.protobuf.RpcController controller,
56250         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request,
56251         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done);
56252 
56253     /**
56254      * <code>rpc StopMaster(.StopMasterRequest) returns (.StopMasterResponse);</code>
56255      *
56256      * <pre>
56257      ** Stop HBase Master only.  Does not shutdown the cluster.
56258      * </pre>
56259      */
56260     public abstract void stopMaster(
56261         com.google.protobuf.RpcController controller,
56262         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request,
56263         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done);
56264 
56265     /**
56266      * <code>rpc Balance(.BalanceRequest) returns (.BalanceResponse);</code>
56267      *
56268      * <pre>
56269      **
     * Run the balancer.  Will run the balancer and if there are regions to move,
     * it will go ahead and do the reassignments.  Can NOT run for various reasons.
     * Check logs.
56273      * </pre>
56274      */
56275     public abstract void balance(
56276         com.google.protobuf.RpcController controller,
56277         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request,
56278         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done);
56279 
56280     /**
56281      * <code>rpc SetBalancerRunning(.SetBalancerRunningRequest) returns (.SetBalancerRunningResponse);</code>
56282      *
56283      * <pre>
56284      **
56285      * Turn the load balancer on or off.
56286      * If synchronous is true, it waits until current balance() call, if outstanding, to return.
56287      * </pre>
56288      */
56289     public abstract void setBalancerRunning(
56290         com.google.protobuf.RpcController controller,
56291         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request,
56292         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done);
56293 
56294     /**
56295      * <code>rpc IsBalancerEnabled(.IsBalancerEnabledRequest) returns (.IsBalancerEnabledResponse);</code>
56296      *
56297      * <pre>
56298      **
56299      * Query whether the Region Balancer is running.
56300      * </pre>
56301      */
56302     public abstract void isBalancerEnabled(
56303         com.google.protobuf.RpcController controller,
56304         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request,
56305         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done);
56306 
56307     /**
56308      * <code>rpc Normalize(.NormalizeRequest) returns (.NormalizeResponse);</code>
56309      *
56310      * <pre>
56311      **
56312      * Run region normalizer. Can NOT run for various reasons. Check logs.
56313      * </pre>
56314      */
56315     public abstract void normalize(
56316         com.google.protobuf.RpcController controller,
56317         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
56318         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse> done);
56319 
56320     /**
56321      * <code>rpc SetNormalizerRunning(.SetNormalizerRunningRequest) returns (.SetNormalizerRunningResponse);</code>
56322      *
56323      * <pre>
56324      **
56325      * Turn region normalizer on or off.
56326      * </pre>
56327      */
56328     public abstract void setNormalizerRunning(
56329         com.google.protobuf.RpcController controller,
56330         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request,
56331         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse> done);
56332 
56333     /**
56334      * <code>rpc IsNormalizerEnabled(.IsNormalizerEnabledRequest) returns (.IsNormalizerEnabledResponse);</code>
56335      *
56336      * <pre>
56337      **
56338      * Query whether region normalizer is enabled.
56339      * </pre>
56340      */
56341     public abstract void isNormalizerEnabled(
56342         com.google.protobuf.RpcController controller,
56343         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request,
56344         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse> done);
56345 
56346     /**
56347      * <code>rpc RunCatalogScan(.RunCatalogScanRequest) returns (.RunCatalogScanResponse);</code>
56348      *
56349      * <pre>
56350      ** Get a run of the catalog janitor
56351      * </pre>
56352      */
56353     public abstract void runCatalogScan(
56354         com.google.protobuf.RpcController controller,
56355         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
56356         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done);
56357 
56358     /**
56359      * <code>rpc EnableCatalogJanitor(.EnableCatalogJanitorRequest) returns (.EnableCatalogJanitorResponse);</code>
56360      *
56361      * <pre>
56362      **
     * Turn the catalog janitor on or off.
56364      * </pre>
56365      */
56366     public abstract void enableCatalogJanitor(
56367         com.google.protobuf.RpcController controller,
56368         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
56369         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done);
56370 
56371     /**
56372      * <code>rpc IsCatalogJanitorEnabled(.IsCatalogJanitorEnabledRequest) returns (.IsCatalogJanitorEnabledResponse);</code>
56373      *
56374      * <pre>
56375      **
56376      * Query whether the catalog janitor is enabled.
56377      * </pre>
56378      */
56379     public abstract void isCatalogJanitorEnabled(
56380         com.google.protobuf.RpcController controller,
56381         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
56382         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done);
56383 
56384     /**
56385      * <code>rpc ExecMasterService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
56386      *
56387      * <pre>
56388      **
56389      * Call a master coprocessor endpoint
56390      * </pre>
56391      */
56392     public abstract void execMasterService(
56393         com.google.protobuf.RpcController controller,
56394         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
56395         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
56396 
56397     /**
56398      * <code>rpc Snapshot(.SnapshotRequest) returns (.SnapshotResponse);</code>
56399      *
56400      * <pre>
56401      **
56402      * Create a snapshot for the given table.
56403      * </pre>
56404      */
56405     public abstract void snapshot(
56406         com.google.protobuf.RpcController controller,
56407         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request,
56408         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done);
56409 
56410     /**
56411      * <code>rpc GetCompletedSnapshots(.GetCompletedSnapshotsRequest) returns (.GetCompletedSnapshotsResponse);</code>
56412      *
56413      * <pre>
56414      **
56415      * Get completed snapshots.
56416      * Returns a list of snapshot descriptors for completed snapshots
56417      * </pre>
56418      */
56419     public abstract void getCompletedSnapshots(
56420         com.google.protobuf.RpcController controller,
56421         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request,
56422         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done);
56423 
56424     /**
56425      * <code>rpc DeleteSnapshot(.DeleteSnapshotRequest) returns (.DeleteSnapshotResponse);</code>
56426      *
56427      * <pre>
56428      **
56429      * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
56430      * </pre>
56431      */
56432     public abstract void deleteSnapshot(
56433         com.google.protobuf.RpcController controller,
56434         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request,
56435         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done);
56436 
56437     /**
56438      * <code>rpc IsSnapshotDone(.IsSnapshotDoneRequest) returns (.IsSnapshotDoneResponse);</code>
56439      *
56440      * <pre>
56441      **
56442      * Determine if the snapshot is done yet.
56443      * </pre>
56444      */
56445     public abstract void isSnapshotDone(
56446         com.google.protobuf.RpcController controller,
56447         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request,
56448         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done);
56449 
56450     /**
56451      * <code>rpc RestoreSnapshot(.RestoreSnapshotRequest) returns (.RestoreSnapshotResponse);</code>
56452      *
56453      * <pre>
56454      **
56455      * Restore a snapshot
56456      * </pre>
56457      */
56458     public abstract void restoreSnapshot(
56459         com.google.protobuf.RpcController controller,
56460         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request,
56461         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done);
56462 
56463     /**
56464      * <code>rpc IsRestoreSnapshotDone(.IsRestoreSnapshotDoneRequest) returns (.IsRestoreSnapshotDoneResponse);</code>
56465      *
56466      * <pre>
56467      **
56468      * Determine if the snapshot restore is done yet.
56469      * </pre>
56470      */
56471     public abstract void isRestoreSnapshotDone(
56472         com.google.protobuf.RpcController controller,
56473         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request,
56474         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done);
56475 
56476     /**
56477      * <code>rpc ExecProcedure(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code>
56478      *
56479      * <pre>
56480      **
56481      * Execute a distributed procedure.
56482      * </pre>
56483      */
56484     public abstract void execProcedure(
56485         com.google.protobuf.RpcController controller,
56486         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
56487         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done);
56488 
56489     /**
56490      * <code>rpc ExecProcedureWithRet(.ExecProcedureRequest) returns (.ExecProcedureResponse);</code>
56491      *
56492      * <pre>
56493      **
56494      * Execute a distributed procedure with return data.
56495      * </pre>
56496      */
56497     public abstract void execProcedureWithRet(
56498         com.google.protobuf.RpcController controller,
56499         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
56500         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done);
56501 
56502     /**
56503      * <code>rpc IsProcedureDone(.IsProcedureDoneRequest) returns (.IsProcedureDoneResponse);</code>
56504      *
56505      * <pre>
56506      **
56507      * Determine if the procedure is done yet.
56508      * </pre>
56509      */
56510     public abstract void isProcedureDone(
56511         com.google.protobuf.RpcController controller,
56512         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
56513         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done);
56514 
56515     /**
56516      * <code>rpc ModifyNamespace(.ModifyNamespaceRequest) returns (.ModifyNamespaceResponse);</code>
56517      *
56518      * <pre>
56519      ** Modify a namespace's metadata
56520      * </pre>
56521      */
56522     public abstract void modifyNamespace(
56523         com.google.protobuf.RpcController controller,
56524         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
56525         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done);
56526 
56527     /**
56528      * <code>rpc CreateNamespace(.CreateNamespaceRequest) returns (.CreateNamespaceResponse);</code>
56529      *
56530      * <pre>
56531      ** Creates a new namespace synchronously
56532      * </pre>
56533      */
56534     public abstract void createNamespace(
56535         com.google.protobuf.RpcController controller,
56536         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
56537         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done);
56538 
56539     /**
56540      * <code>rpc DeleteNamespace(.DeleteNamespaceRequest) returns (.DeleteNamespaceResponse);</code>
56541      *
56542      * <pre>
     ** Deletes a namespace synchronously
56544      * </pre>
56545      */
56546     public abstract void deleteNamespace(
56547         com.google.protobuf.RpcController controller,
56548         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
56549         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done);
56550 
56551     /**
56552      * <code>rpc GetNamespaceDescriptor(.GetNamespaceDescriptorRequest) returns (.GetNamespaceDescriptorResponse);</code>
56553      *
56554      * <pre>
56555      ** Get a namespace descriptor by name
56556      * </pre>
56557      */
56558     public abstract void getNamespaceDescriptor(
56559         com.google.protobuf.RpcController controller,
56560         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
56561         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done);
56562 
56563     /**
56564      * <code>rpc ListNamespaceDescriptors(.ListNamespaceDescriptorsRequest) returns (.ListNamespaceDescriptorsResponse);</code>
56565      *
56566      * <pre>
56567      ** returns a list of namespaces
56568      * </pre>
56569      */
56570     public abstract void listNamespaceDescriptors(
56571         com.google.protobuf.RpcController controller,
56572         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
56573         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done);
56574 
56575     /**
56576      * <code>rpc ListTableDescriptorsByNamespace(.ListTableDescriptorsByNamespaceRequest) returns (.ListTableDescriptorsByNamespaceResponse);</code>
56577      *
56578      * <pre>
56579      ** returns a list of tables for a given namespace
56580      * </pre>
56581      */
56582     public abstract void listTableDescriptorsByNamespace(
56583         com.google.protobuf.RpcController controller,
56584         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
56585         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done);
56586 
56587     /**
56588      * <code>rpc ListTableNamesByNamespace(.ListTableNamesByNamespaceRequest) returns (.ListTableNamesByNamespaceResponse);</code>
56589      *
56590      * <pre>
56591      ** returns a list of tables for a given namespace
56592      * </pre>
56593      */
56594     public abstract void listTableNamesByNamespace(
56595         com.google.protobuf.RpcController controller,
56596         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
56597         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done);
56598 
56599     /**
56600      * <code>rpc SetQuota(.SetQuotaRequest) returns (.SetQuotaResponse);</code>
56601      *
56602      * <pre>
56603      ** Apply the new quota settings
56604      * </pre>
56605      */
56606     public abstract void setQuota(
56607         com.google.protobuf.RpcController controller,
56608         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request,
56609         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done);
56610 
56611     /**
56612      * <code>rpc getLastMajorCompactionTimestamp(.MajorCompactionTimestampRequest) returns (.MajorCompactionTimestampResponse);</code>
56613      *
56614      * <pre>
56615      ** Returns the timestamp of the last major compaction
56616      * </pre>
56617      */
56618     public abstract void getLastMajorCompactionTimestamp(
56619         com.google.protobuf.RpcController controller,
56620         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request,
56621         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done);
56622 
56623     /**
56624      * <code>rpc getLastMajorCompactionTimestampForRegion(.MajorCompactionTimestampForRegionRequest) returns (.MajorCompactionTimestampResponse);</code>
56625      *
56626      * <pre>
56627      ** Returns the timestamp of the last major compaction
56628      * </pre>
56629      */
56630     public abstract void getLastMajorCompactionTimestampForRegion(
56631         com.google.protobuf.RpcController controller,
56632         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request,
56633         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done);
56634 
56635     /**
56636      * <code>rpc getProcedureResult(.GetProcedureResultRequest) returns (.GetProcedureResultResponse);</code>
56637      */
56638     public abstract void getProcedureResult(
56639         com.google.protobuf.RpcController controller,
56640         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
56641         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done);
56642 
56643     /**
56644      * <code>rpc getSecurityCapabilities(.SecurityCapabilitiesRequest) returns (.SecurityCapabilitiesResponse);</code>
56645      *
56646      * <pre>
56647      ** Returns the security capabilities in effect on the cluster
56648      * </pre>
56649      */
56650     public abstract void getSecurityCapabilities(
56651         com.google.protobuf.RpcController controller,
56652         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
56653         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done);
56654 
56655     /**
56656      * <code>rpc AbortProcedure(.AbortProcedureRequest) returns (.AbortProcedureResponse);</code>
56657      *
56658      * <pre>
56659      ** Abort a procedure
56660      * </pre>
56661      */
56662     public abstract void abortProcedure(
56663         com.google.protobuf.RpcController controller,
56664         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
56665         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done);
56666 
56667     /**
56668      * <code>rpc ListProcedures(.ListProceduresRequest) returns (.ListProceduresResponse);</code>
56669      *
56670      * <pre>
56671      ** returns a list of procedures
56672      * </pre>
56673      */
56674     public abstract void listProcedures(
56675         com.google.protobuf.RpcController controller,
56676         org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
56677         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done);
56678 
56679     public static final
56680         com.google.protobuf.Descriptors.ServiceDescriptor
56681         getDescriptor() {
56682       return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.getDescriptor().getServices().get(0);
56683     }
56684     public final com.google.protobuf.Descriptors.ServiceDescriptor
56685         getDescriptorForType() {
56686       return getDescriptor();
56687     }
56688 
    /**
     * Dispatches a reflective RPC invocation to the matching typed abstract
     * method, keyed by the method's index in the service descriptor.
     *
     * <p>The case ordering must match the rpc declaration order in
     * Master.proto exactly — each case down-casts {@code request} to the
     * request type of the rpc at that descriptor index and specializes the
     * generic {@code done} callback to the matching response type.
     *
     * @throws java.lang.IllegalArgumentException if {@code method} belongs to
     *         a different service's descriptor
     */
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.getSchemaAlterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse>specializeCallback(
              done));
          return;
        case 1:
          this.getTableDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse>specializeCallback(
              done));
          return;
        case 2:
          this.getTableNames(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse>specializeCallback(
              done));
          return;
        case 3:
          this.getClusterStatus(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse>specializeCallback(
              done));
          return;
        case 4:
          this.isMasterRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse>specializeCallback(
              done));
          return;
        case 5:
          this.addColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse>specializeCallback(
              done));
          return;
        case 6:
          this.deleteColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse>specializeCallback(
              done));
          return;
        case 7:
          this.modifyColumn(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse>specializeCallback(
              done));
          return;
        case 8:
          this.moveRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse>specializeCallback(
              done));
          return;
        case 9:
          this.dispatchMergingRegions(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse>specializeCallback(
              done));
          return;
        case 10:
          this.assignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse>specializeCallback(
              done));
          return;
        case 11:
          this.unassignRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse>specializeCallback(
              done));
          return;
        case 12:
          this.offlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse>specializeCallback(
              done));
          return;
        case 13:
          this.deleteTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse>specializeCallback(
              done));
          return;
        case 14:
          this.truncateTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse>specializeCallback(
              done));
          return;
        case 15:
          this.enableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse>specializeCallback(
              done));
          return;
        case 16:
          this.disableTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse>specializeCallback(
              done));
          return;
        case 17:
          this.modifyTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse>specializeCallback(
              done));
          return;
        case 18:
          this.createTable(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse>specializeCallback(
              done));
          return;
        case 19:
          this.shutdown(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse>specializeCallback(
              done));
          return;
        case 20:
          this.stopMaster(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse>specializeCallback(
              done));
          return;
        case 21:
          this.balance(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse>specializeCallback(
              done));
          return;
        case 22:
          this.setBalancerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse>specializeCallback(
              done));
          return;
        case 23:
          this.isBalancerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse>specializeCallback(
              done));
          return;
        case 24:
          this.normalize(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse>specializeCallback(
              done));
          return;
        case 25:
          this.setNormalizerRunning(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse>specializeCallback(
              done));
          return;
        case 26:
          this.isNormalizerEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse>specializeCallback(
              done));
          return;
        case 27:
          this.runCatalogScan(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse>specializeCallback(
              done));
          return;
        case 28:
          this.enableCatalogJanitor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse>specializeCallback(
              done));
          return;
        case 29:
          this.isCatalogJanitorEnabled(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse>specializeCallback(
              done));
          return;
        // Note: execMasterService is the only rpc whose request/response types
        // come from ClientProtos rather than MasterProtos.
        case 30:
          this.execMasterService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
              done));
          return;
        case 31:
          this.snapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse>specializeCallback(
              done));
          return;
        case 32:
          this.getCompletedSnapshots(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse>specializeCallback(
              done));
          return;
        case 33:
          this.deleteSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse>specializeCallback(
              done));
          return;
        case 34:
          this.isSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse>specializeCallback(
              done));
          return;
        case 35:
          this.restoreSnapshot(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse>specializeCallback(
              done));
          return;
        case 36:
          this.isRestoreSnapshotDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse>specializeCallback(
              done));
          return;
        case 37:
          this.execProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback(
              done));
          return;
        // execProcedureWithRet (case 38) shares ExecProcedureRequest/Response
        // with execProcedure (case 37); only the target method differs.
        case 38:
          this.execProcedureWithRet(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse>specializeCallback(
              done));
          return;
        case 39:
          this.isProcedureDone(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse>specializeCallback(
              done));
          return;
        case 40:
          this.modifyNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse>specializeCallback(
              done));
          return;
        case 41:
          this.createNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse>specializeCallback(
              done));
          return;
        case 42:
          this.deleteNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse>specializeCallback(
              done));
          return;
        case 43:
          this.getNamespaceDescriptor(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse>specializeCallback(
              done));
          return;
        case 44:
          this.listNamespaceDescriptors(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse>specializeCallback(
              done));
          return;
        case 45:
          this.listTableDescriptorsByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse>specializeCallback(
              done));
          return;
        case 46:
          this.listTableNamesByNamespace(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse>specializeCallback(
              done));
          return;
        case 47:
          this.setQuota(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse>specializeCallback(
              done));
          return;
        case 48:
          this.getLastMajorCompactionTimestamp(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse>specializeCallback(
              done));
          return;
        // Cases 48 and 49 take different request types but share the same
        // MajorCompactionTimestampResponse.
        case 49:
          this.getLastMajorCompactionTimestampForRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse>specializeCallback(
              done));
          return;
        case 50:
          this.getProcedureResult(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse>specializeCallback(
              done));
          return;
        case 51:
          this.getSecurityCapabilities(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse>specializeCallback(
              done));
          return;
        case 52:
          this.abortProcedure(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse>specializeCallback(
              done));
          return;
        case 53:
          this.listProcedures(controller, (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
56975 
    /**
     * Returns the default-instance prototype used to parse the request message
     * for the given rpc method. The case index corresponds to the method's
     * position in the service descriptor and must match the rpc declaration
     * order in Master.proto.
     *
     * @throws java.lang.IllegalArgumentException if {@code method} belongs to
     *         a different service's descriptor
     */
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest.getDefaultInstance();
        case 4:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDefaultInstance();
        case 5:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest.getDefaultInstance();
        case 6:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest.getDefaultInstance();
        case 7:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest.getDefaultInstance();
        case 8:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest.getDefaultInstance();
        case 9:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest.getDefaultInstance();
        case 10:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest.getDefaultInstance();
        case 11:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest.getDefaultInstance();
        case 12:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest.getDefaultInstance();
        case 13:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest.getDefaultInstance();
        case 14:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest.getDefaultInstance();
        case 15:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest.getDefaultInstance();
        case 16:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest.getDefaultInstance();
        case 17:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest.getDefaultInstance();
        case 18:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest.getDefaultInstance();
        case 19:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest.getDefaultInstance();
        case 20:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest.getDefaultInstance();
        case 21:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest.getDefaultInstance();
        case 22:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest.getDefaultInstance();
        case 23:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest.getDefaultInstance();
        case 24:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest.getDefaultInstance();
        case 25:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest.getDefaultInstance();
        case 26:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest.getDefaultInstance();
        case 27:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest.getDefaultInstance();
        case 28:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest.getDefaultInstance();
        case 29:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest.getDefaultInstance();
        // execMasterService (case 30) is the only rpc whose request type comes
        // from ClientProtos rather than MasterProtos.
        case 30:
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
        case 31:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest.getDefaultInstance();
        case 32:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest.getDefaultInstance();
        case 33:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest.getDefaultInstance();
        case 34:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest.getDefaultInstance();
        case 35:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest.getDefaultInstance();
        case 36:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest.getDefaultInstance();
        case 37:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
        // Cases 37 and 38 intentionally share ExecProcedureRequest:
        // execProcedure and execProcedureWithRet take the same request type.
        case 38:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest.getDefaultInstance();
        case 39:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest.getDefaultInstance();
        case 40:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest.getDefaultInstance();
        case 41:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest.getDefaultInstance();
        case 42:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest.getDefaultInstance();
        case 43:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest.getDefaultInstance();
        case 44:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest.getDefaultInstance();
        case 45:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest.getDefaultInstance();
        case 46:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest.getDefaultInstance();
        case 47:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest.getDefaultInstance();
        case 48:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest.getDefaultInstance();
        case 49:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest.getDefaultInstance();
        case 50:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest.getDefaultInstance();
        case 51:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest.getDefaultInstance();
        case 52:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest.getDefaultInstance();
        case 53:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
57097 
    /**
     * Returns the default (prototype) response message for the given MasterService
     * RPC method.  The switch maps the method's descriptor index (0-53) to the
     * response type declared for that method in Master.proto; the case ordering must
     * stay in sync with the service definition and is maintained by the code
     * generator — do not reorder by hand.
     *
     * @param method descriptor of a method belonging to this service
     * @return the default instance of that method's response message
     * @throws java.lang.IllegalArgumentException if {@code method} belongs to a
     *         different service descriptor
     */
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance();
        case 4:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance();
        case 5:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance();
        case 6:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance();
        case 7:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance();
        case 8:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance();
        case 9:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance();
        case 10:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance();
        case 11:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance();
        case 12:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance();
        case 13:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance();
        case 14:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance();
        case 15:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance();
        case 16:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance();
        case 17:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance();
        case 18:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance();
        case 19:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance();
        case 20:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance();
        case 21:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance();
        case 22:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance();
        case 23:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance();
        case 24:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance();
        case 25:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance();
        case 26:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance();
        case 27:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance();
        case 28:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance();
        case 29:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance();
        case 30:
          // Only response type from ClientProtos: coprocessor service calls reuse the
          // generic client-side CoprocessorServiceResponse.
          return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
        case 31:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance();
        case 32:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance();
        case 33:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance();
        case 34:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance();
        case 35:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance();
        case 36:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance();
        case 37:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
        case 38:
          // NOTE(review): intentionally the same response type as index 37 — the two
          // ExecProcedure variants appear to share ExecProcedureResponse in
          // Master.proto (not a copy/paste error); confirm against the .proto.
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance();
        case 39:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance();
        case 40:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance();
        case 41:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance();
        case 42:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance();
        case 43:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance();
        case 44:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance();
        case 45:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance();
        case 46:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance();
        case 47:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance();
        case 48:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
        case 49:
          // NOTE(review): same response type as index 48 — the table-level and
          // region-level major-compaction-timestamp RPCs take distinct requests
          // (see getRequestPrototype) but share one response type.
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance();
        case 50:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance();
        case 51:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance();
        case 52:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance();
        case 53:
          return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance();
        default:
          // Every valid method index of this service is enumerated above.
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
57219 
57220     public static Stub newStub(
57221         com.google.protobuf.RpcChannel channel) {
57222       return new Stub(channel);
57223     }
57224 
57225     public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService implements Interface {
      // Binds this stub to the channel that every generated RPC method dispatches
      // through.  Private: instances are created via MasterService.newStub(channel).
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
57229 
      // Transport over which every stub method issues its asynchronous call.
      private final com.google.protobuf.RpcChannel channel;

      /** Returns the RPC channel this stub was constructed with. */
      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }
57235 
      /** Asynchronously invokes the {@code GetSchemaAlterStatus} RPC (descriptor index 0); {@code done} receives the response. */
      public  void getSchemaAlterStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance()));
      }
57250 
      /** Asynchronously invokes the {@code GetTableDescriptors} RPC (descriptor index 1); {@code done} receives the response. */
      public  void getTableDescriptors(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance()));
      }
57265 
      /** Asynchronously invokes the {@code GetTableNames} RPC (descriptor index 2); {@code done} receives the response. */
      public  void getTableNames(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance()));
      }
57280 
      /** Asynchronously invokes the {@code GetClusterStatus} RPC (descriptor index 3); {@code done} receives the response. */
      public  void getClusterStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance()));
      }
57295 
      /** Asynchronously invokes the {@code IsMasterRunning} RPC (descriptor index 4); {@code done} receives the response. */
      public  void isMasterRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance()));
      }
57310 
      /** Asynchronously invokes the {@code AddColumn} RPC (descriptor index 5); {@code done} receives the response. */
      public  void addColumn(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance()));
      }
57325 
      /** Asynchronously invokes the {@code DeleteColumn} RPC (descriptor index 6); {@code done} receives the response. */
      public  void deleteColumn(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance()));
      }
57340 
      /** Asynchronously invokes the {@code ModifyColumn} RPC (descriptor index 7); {@code done} receives the response. */
      public  void modifyColumn(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance()));
      }
57355 
      /** Asynchronously invokes the {@code MoveRegion} RPC (descriptor index 8); {@code done} receives the response. */
      public  void moveRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance()));
      }
57370 
      /** Asynchronously invokes the {@code DispatchMergingRegions} RPC (descriptor index 9); {@code done} receives the response. */
      public  void dispatchMergingRegions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance()));
      }
57385 
      /** Asynchronously invokes the {@code AssignRegion} RPC (descriptor index 10); {@code done} receives the response. */
      public  void assignRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance()));
      }
57400 
      /** Asynchronously invokes the {@code UnassignRegion} RPC (descriptor index 11); {@code done} receives the response. */
      public  void unassignRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance()));
      }
57415 
      /** Asynchronously invokes the {@code OfflineRegion} RPC (descriptor index 12); {@code done} receives the response. */
      public  void offlineRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance()));
      }
57430 
      /** Asynchronously invokes the {@code DeleteTable} RPC (descriptor index 13); {@code done} receives the response. */
      public  void deleteTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance()));
      }
57445 
      /** Asynchronously invokes the {@code TruncateTable} RPC (descriptor index 14); {@code done} receives the response. */
      public  void truncateTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(14),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance()));
      }
57460 
      /** Asynchronously invokes the {@code EnableTable} RPC (descriptor index 15); {@code done} receives the response. */
      public  void enableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(15),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance()));
      }
57475 
      /** Asynchronously invokes the {@code DisableTable} RPC (descriptor index 16); {@code done} receives the response. */
      public  void disableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(16),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance()));
      }
57490 
      /** Asynchronously invokes the {@code ModifyTable} RPC (descriptor index 17); {@code done} receives the response. */
      public  void modifyTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(17),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance()));
      }
57505 
      /** Asynchronously invokes the {@code CreateTable} RPC (descriptor index 18); {@code done} receives the response. */
      public  void createTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(18),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance()));
      }
57520 
      /** Asynchronously invokes the {@code Shutdown} RPC (descriptor index 19); {@code done} receives the response. */
      public  void shutdown(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(19),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance()));
      }
57535 
      /** Asynchronously invokes the {@code StopMaster} RPC (descriptor index 20); {@code done} receives the response. */
      public  void stopMaster(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(20),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance()));
      }
57550 
      /** Asynchronously invokes the {@code Balance} RPC (descriptor index 21); {@code done} receives the response. */
      public  void balance(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(21),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance()));
      }
57565 
      /** Asynchronously invokes the {@code SetBalancerRunning} RPC (descriptor index 22); {@code done} receives the response. */
      public  void setBalancerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(22),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance()));
      }
57580 
      /** Asynchronously invokes the {@code IsBalancerEnabled} RPC (descriptor index 23); {@code done} receives the response. */
      public  void isBalancerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(23),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance()));
      }
57595 
      /** Asynchronously invokes the {@code Normalize} RPC (descriptor index 24); {@code done} receives the response. */
      public  void normalize(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(24),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance()));
      }
57610 
      /** Asynchronously invokes the {@code SetNormalizerRunning} RPC (descriptor index 25); {@code done} receives the response. */
      public  void setNormalizerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(25),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance()));
      }
57625 
      /** Asynchronously invokes the {@code isNormalizerEnabled} RPC (method index 26); {@code done} receives the typed IsNormalizerEnabledResponse. */
      public  void isNormalizerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(26),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance()));
      }
57640 
      /** Asynchronously invokes the {@code runCatalogScan} RPC (method index 27); {@code done} receives the typed RunCatalogScanResponse. */
      public  void runCatalogScan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(27),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance()));
      }
57655 
      /** Asynchronously invokes the {@code enableCatalogJanitor} RPC (method index 28); {@code done} receives the typed EnableCatalogJanitorResponse. */
      public  void enableCatalogJanitor(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(28),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance()));
      }
57670 
      /** Asynchronously invokes the {@code isCatalogJanitorEnabled} RPC (method index 29); {@code done} receives the typed IsCatalogJanitorEnabledResponse. */
      public  void isCatalogJanitorEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(29),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance()));
      }
57685 
      /** Asynchronously invokes the {@code execMasterService} RPC (method index 30); note this one uses ClientProtos coprocessor-service message types rather than MasterProtos. */
      public  void execMasterService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(30),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
      }
57700 
      /** Asynchronously invokes the {@code snapshot} RPC (method index 31); {@code done} receives the typed SnapshotResponse. */
      public  void snapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(31),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance()));
      }
57715 
      /** Asynchronously invokes the {@code getCompletedSnapshots} RPC (method index 32); {@code done} receives the typed GetCompletedSnapshotsResponse. */
      public  void getCompletedSnapshots(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(32),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance()));
      }
57730 
      /** Asynchronously invokes the {@code deleteSnapshot} RPC (method index 33); {@code done} receives the typed DeleteSnapshotResponse. */
      public  void deleteSnapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(33),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance()));
      }
57745 
      /** Asynchronously invokes the {@code isSnapshotDone} RPC (method index 34); {@code done} receives the typed IsSnapshotDoneResponse. */
      public  void isSnapshotDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(34),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance()));
      }
57760 
      /** Asynchronously invokes the {@code restoreSnapshot} RPC (method index 35); {@code done} receives the typed RestoreSnapshotResponse. */
      public  void restoreSnapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(35),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance()));
      }
57775 
      /** Asynchronously invokes the {@code isRestoreSnapshotDone} RPC (method index 36); {@code done} receives the typed IsRestoreSnapshotDoneResponse. */
      public  void isRestoreSnapshotDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(36),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance()));
      }
57790 
      /** Asynchronously invokes the {@code execProcedure} RPC (method index 37); {@code done} receives the typed ExecProcedureResponse. */
      public  void execProcedure(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(37),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()));
      }
57805 
      /** Asynchronously invokes the {@code execProcedureWithRet} RPC (method index 38); shares the ExecProcedureRequest/Response message types with {@code execProcedure} but targets a distinct service method. */
      public  void execProcedureWithRet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(38),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance()));
      }
57820 
      /** Asynchronously invokes the {@code isProcedureDone} RPC (method index 39); {@code done} receives the typed IsProcedureDoneResponse. */
      public  void isProcedureDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(39),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance()));
      }
57835 
      /** Asynchronously invokes the {@code modifyNamespace} RPC (method index 40); {@code done} receives the typed ModifyNamespaceResponse. */
      public  void modifyNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(40),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance()));
      }
57850 
      /** Asynchronously invokes the {@code createNamespace} RPC (method index 41); {@code done} receives the typed CreateNamespaceResponse. */
      public  void createNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(41),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance()));
      }
57865 
      /** Asynchronously invokes the {@code deleteNamespace} RPC (method index 42); {@code done} receives the typed DeleteNamespaceResponse. */
      public  void deleteNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(42),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance()));
      }
57880 
      /** Asynchronously invokes the {@code getNamespaceDescriptor} RPC (method index 43); {@code done} receives the typed GetNamespaceDescriptorResponse. */
      public  void getNamespaceDescriptor(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(43),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance()));
      }
57895 
      /** Asynchronously invokes the {@code listNamespaceDescriptors} RPC (method index 44); {@code done} receives the typed ListNamespaceDescriptorsResponse. */
      public  void listNamespaceDescriptors(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(44),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance()));
      }
57910 
      /** Asynchronously invokes the {@code listTableDescriptorsByNamespace} RPC (method index 45); {@code done} receives the typed ListTableDescriptorsByNamespaceResponse. */
      public  void listTableDescriptorsByNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(45),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance()));
      }
57925 
      /** Asynchronously invokes the {@code listTableNamesByNamespace} RPC (method index 46); {@code done} receives the typed ListTableNamesByNamespaceResponse. */
      public  void listTableNamesByNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(46),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance()));
      }
57940 
      /** Asynchronously invokes the {@code setQuota} RPC (method index 47); {@code done} receives the typed SetQuotaResponse. */
      public  void setQuota(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(47),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance()));
      }
57955 
      /** Asynchronously invokes the {@code getLastMajorCompactionTimestamp} RPC (method index 48); {@code done} receives the typed MajorCompactionTimestampResponse. */
      public  void getLastMajorCompactionTimestamp(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(48),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance()));
      }
57970 
      /** Asynchronously invokes the {@code getLastMajorCompactionTimestampForRegion} RPC (method index 49); request is region-scoped but the response type is the same MajorCompactionTimestampResponse as the table-scoped variant. */
      public  void getLastMajorCompactionTimestampForRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(49),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance()));
      }
57985 
      /** Asynchronously invokes the {@code getProcedureResult} RPC (method index 50); {@code done} receives the typed GetProcedureResultResponse. */
      public  void getProcedureResult(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(50),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance()));
      }
58000 
      /** Asynchronously invokes the {@code getSecurityCapabilities} RPC (method index 51); {@code done} receives the typed SecurityCapabilitiesResponse. */
      public  void getSecurityCapabilities(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(51),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance()));
      }
58015 
      /** Asynchronously invokes the {@code abortProcedure} RPC (method index 52); {@code done} receives the typed AbortProcedureResponse. */
      public  void abortProcedure(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(52),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance()));
      }
58030 
      /** Asynchronously invokes the {@code listProcedures} RPC (method index 53); {@code done} receives the typed ListProceduresResponse. */
      public  void listProcedures(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(53),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.class,
            org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance()));
      }
58045     }
58046 
    /** Creates a synchronous client stub for this service over the given blocking RPC channel. */
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
58051 
58052     public interface BlockingInterface {
58053       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus(
58054           com.google.protobuf.RpcController controller,
58055           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request)
58056           throws com.google.protobuf.ServiceException;
58057 
58058       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getTableDescriptors(
58059           com.google.protobuf.RpcController controller,
58060           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request)
58061           throws com.google.protobuf.ServiceException;
58062 
58063       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse getTableNames(
58064           com.google.protobuf.RpcController controller,
58065           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request)
58066           throws com.google.protobuf.ServiceException;
58067 
58068       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse getClusterStatus(
58069           com.google.protobuf.RpcController controller,
58070           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request)
58071           throws com.google.protobuf.ServiceException;
58072 
58073       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning(
58074           com.google.protobuf.RpcController controller,
58075           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request)
58076           throws com.google.protobuf.ServiceException;
58077 
58078       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse addColumn(
58079           com.google.protobuf.RpcController controller,
58080           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request)
58081           throws com.google.protobuf.ServiceException;
58082 
58083       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse deleteColumn(
58084           com.google.protobuf.RpcController controller,
58085           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request)
58086           throws com.google.protobuf.ServiceException;
58087 
58088       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse modifyColumn(
58089           com.google.protobuf.RpcController controller,
58090           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request)
58091           throws com.google.protobuf.ServiceException;
58092 
58093       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse moveRegion(
58094           com.google.protobuf.RpcController controller,
58095           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request)
58096           throws com.google.protobuf.ServiceException;
58097 
58098       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse dispatchMergingRegions(
58099           com.google.protobuf.RpcController controller,
58100           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request)
58101           throws com.google.protobuf.ServiceException;
58102 
58103       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse assignRegion(
58104           com.google.protobuf.RpcController controller,
58105           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request)
58106           throws com.google.protobuf.ServiceException;
58107 
58108       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse unassignRegion(
58109           com.google.protobuf.RpcController controller,
58110           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request)
58111           throws com.google.protobuf.ServiceException;
58112 
58113       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse offlineRegion(
58114           com.google.protobuf.RpcController controller,
58115           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request)
58116           throws com.google.protobuf.ServiceException;
58117 
58118       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse deleteTable(
58119           com.google.protobuf.RpcController controller,
58120           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request)
58121           throws com.google.protobuf.ServiceException;
58122 
      /** Synchronously invokes the TruncateTable RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse truncateTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request)
          throws com.google.protobuf.ServiceException;
58127 
      /** Synchronously invokes the EnableTable RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse enableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request)
          throws com.google.protobuf.ServiceException;
58132 
      /** Synchronously invokes the DisableTable RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse disableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request)
          throws com.google.protobuf.ServiceException;
58137 
      /** Synchronously invokes the ModifyTable RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse modifyTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request)
          throws com.google.protobuf.ServiceException;
58142 
      /** Synchronously invokes the CreateTable RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse createTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request)
          throws com.google.protobuf.ServiceException;
58147 
      /** Synchronously invokes the Shutdown RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse shutdown(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request)
          throws com.google.protobuf.ServiceException;
58152 
      /** Synchronously invokes the StopMaster RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse stopMaster(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request)
          throws com.google.protobuf.ServiceException;
58157 
      /** Synchronously invokes the Balance RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse balance(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request)
          throws com.google.protobuf.ServiceException;
58162 
      /** Synchronously invokes the SetBalancerRunning RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse setBalancerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request)
          throws com.google.protobuf.ServiceException;
58167 
      /** Synchronously invokes the IsBalancerEnabled RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse isBalancerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request)
          throws com.google.protobuf.ServiceException;
58172 
      /** Synchronously invokes the Normalize RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse normalize(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request)
          throws com.google.protobuf.ServiceException;
58177 
      /** Synchronously invokes the SetNormalizerRunning RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse setNormalizerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request)
          throws com.google.protobuf.ServiceException;
58182 
      /** Synchronously invokes the IsNormalizerEnabled RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse isNormalizerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request)
          throws com.google.protobuf.ServiceException;
58187 
      /** Synchronously invokes the RunCatalogScan RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse runCatalogScan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request)
          throws com.google.protobuf.ServiceException;
58192 
      /** Synchronously invokes the EnableCatalogJanitor RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse enableCatalogJanitor(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request)
          throws com.google.protobuf.ServiceException;
58197 
      /** Synchronously invokes the IsCatalogJanitorEnabled RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request)
          throws com.google.protobuf.ServiceException;
58202 
      /** Synchronously invokes the ExecMasterService RPC; note its request/response come from ClientProtos, not MasterProtos. */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException;
58207 
      /** Synchronously invokes the Snapshot RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse snapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request)
          throws com.google.protobuf.ServiceException;
58212 
      /** Synchronously invokes the GetCompletedSnapshots RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getCompletedSnapshots(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request)
          throws com.google.protobuf.ServiceException;
58217 
      /** Synchronously invokes the DeleteSnapshot RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse deleteSnapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request)
          throws com.google.protobuf.ServiceException;
58222 
      /** Synchronously invokes the IsSnapshotDone RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse isSnapshotDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request)
          throws com.google.protobuf.ServiceException;
58227 
      /** Synchronously invokes the RestoreSnapshot RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse restoreSnapshot(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request)
          throws com.google.protobuf.ServiceException;
58232 
      /** Synchronously invokes the IsRestoreSnapshotDone RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse isRestoreSnapshotDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request)
          throws com.google.protobuf.ServiceException;
58237 
      /** Synchronously invokes the ExecProcedure RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedure(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
          throws com.google.protobuf.ServiceException;
58242 
      /** Synchronously invokes the ExecProcedureWithRet RPC; shares request/response messages with execProcedure. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedureWithRet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
          throws com.google.protobuf.ServiceException;
58247 
      /** Synchronously invokes the IsProcedureDone RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse isProcedureDone(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request)
          throws com.google.protobuf.ServiceException;
58252 
      /** Synchronously invokes the ModifyNamespace RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse modifyNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request)
          throws com.google.protobuf.ServiceException;
58257 
      /** Synchronously invokes the CreateNamespace RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse createNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request)
          throws com.google.protobuf.ServiceException;
58262 
      /** Synchronously invokes the DeleteNamespace RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse deleteNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request)
          throws com.google.protobuf.ServiceException;
58267 
      /** Synchronously invokes the GetNamespaceDescriptor RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getNamespaceDescriptor(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request)
          throws com.google.protobuf.ServiceException;
58272 
      /** Synchronously invokes the ListNamespaceDescriptors RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse listNamespaceDescriptors(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request)
          throws com.google.protobuf.ServiceException;
58277 
      /** Synchronously invokes the ListTableDescriptorsByNamespace RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request)
          throws com.google.protobuf.ServiceException;
58282 
      /** Synchronously invokes the ListTableNamesByNamespace RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse listTableNamesByNamespace(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request)
          throws com.google.protobuf.ServiceException;
58287 
      /** Synchronously invokes the SetQuota RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse setQuota(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request)
          throws com.google.protobuf.ServiceException;
58292 
      /** Synchronously invokes the GetLastMajorCompactionTimestamp RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getLastMajorCompactionTimestamp(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request)
          throws com.google.protobuf.ServiceException;
58297 
      /** Region-scoped variant of getLastMajorCompactionTimestamp; same response message, region-specific request. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getLastMajorCompactionTimestampForRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request)
          throws com.google.protobuf.ServiceException;
58302 
      /** Synchronously invokes the GetProcedureResult RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse getProcedureResult(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request)
          throws com.google.protobuf.ServiceException;
58307 
      /** Synchronously invokes the GetSecurityCapabilities RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getSecurityCapabilities(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request)
          throws com.google.protobuf.ServiceException;
58312 
      /** Synchronously invokes the AbortProcedure RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse abortProcedure(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request)
          throws com.google.protobuf.ServiceException;
58317 
      /** Synchronously invokes the ListProcedures RPC on the master service. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse listProcedures(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
          throws com.google.protobuf.ServiceException;
58322     }
58323 
58324     private static final class BlockingStub implements BlockingInterface {
      // Wraps the given channel; every stub method below forwards to it.
      // Private ctor — instances are presumably handed out by a factory elsewhere
      // in this generated class (not visible in this chunk).
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      // Transport used for all blocking RPC invocations in this stub.
      private final com.google.protobuf.BlockingRpcChannel channel;
58330 
      /**
       * Blocking GetSchemaAlterStatus call. Dispatches service-descriptor method
       * index 0 over the channel and casts the returned message to the response type.
       */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse getSchemaAlterStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse.getDefaultInstance());
      }
58341 
58342 
      /** Blocking GetTableDescriptors call; dispatches service-descriptor method index 1. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse getTableDescriptors(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse.getDefaultInstance());
      }
58353 
58354 
      /** Blocking GetTableNames call; dispatches service-descriptor method index 2. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse getTableNames(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesResponse.getDefaultInstance());
      }
58365 
58366 
      /** Blocking GetClusterStatus call; dispatches service-descriptor method index 3. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse getClusterStatus(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusResponse.getDefaultInstance());
      }
58377 
58378 
      /** Blocking IsMasterRunning call; dispatches service-descriptor method index 4. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse isMasterRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDefaultInstance());
      }
58389 
58390 
      /** Blocking AddColumn call; dispatches service-descriptor method index 5. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse addColumn(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse.getDefaultInstance());
      }
58401 
58402 
      /** Blocking DeleteColumn call; dispatches service-descriptor method index 6. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse deleteColumn(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse.getDefaultInstance());
      }
58413 
58414 
      /** Blocking ModifyColumn call; dispatches service-descriptor method index 7. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse modifyColumn(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse.getDefaultInstance());
      }
58425 
58426 
      /** Blocking MoveRegion call; dispatches service-descriptor method index 8. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse moveRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionResponse.getDefaultInstance());
      }
58437 
58438 
      /** Blocking DispatchMergingRegions call; dispatches service-descriptor method index 9. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse dispatchMergingRegions(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse.getDefaultInstance());
      }
58449 
58450 
      /** Blocking AssignRegion call; dispatches service-descriptor method index 10. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse assignRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionResponse.getDefaultInstance());
      }
58461 
58462 
      /** Blocking UnassignRegion call; dispatches service-descriptor method index 11. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse unassignRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse.getDefaultInstance());
      }
58473 
58474 
      /** Blocking OfflineRegion call; dispatches service-descriptor method index 12. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse offlineRegion(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.OfflineRegionResponse.getDefaultInstance());
      }
58485 
58486 
      /** Blocking DeleteTable call; dispatches service-descriptor method index 13. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse deleteTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse.getDefaultInstance());
      }
58497 
58498 
      /** Blocking TruncateTable call; dispatches service-descriptor method index 14. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse truncateTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(14),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse.getDefaultInstance());
      }
58509 
58510 
      /** Blocking EnableTable call; dispatches service-descriptor method index 15. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse enableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(15),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse.getDefaultInstance());
      }
58521 
58522 
      /** Blocking DisableTable call; dispatches service-descriptor method index 16. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse disableTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(16),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse.getDefaultInstance());
      }
58533 
58534 
      /** Blocking ModifyTable call; dispatches service-descriptor method index 17. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse modifyTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(17),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse.getDefaultInstance());
      }
58545 
58546 
      /** Blocking CreateTable call; dispatches service-descriptor method index 18. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse createTable(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(18),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse.getDefaultInstance());
      }
58557 
58558 
      /** Blocking Shutdown call; dispatches service-descriptor method index 19. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse shutdown(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(19),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownResponse.getDefaultInstance());
      }
58569 
58570 
      /** Blocking StopMaster call; dispatches service-descriptor method index 20. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse stopMaster(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(20),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterResponse.getDefaultInstance());
      }
58581 
58582 
      /** Blocking Balance call; dispatches service-descriptor method index 21. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse balance(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(21),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.BalanceResponse.getDefaultInstance());
      }
58593 
58594 
      /** Blocking SetBalancerRunning call; dispatches service-descriptor method index 22. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse setBalancerRunning(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(22),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningResponse.getDefaultInstance());
      }
58605 
58606 
      /** Blocking IsBalancerEnabled call; dispatches service-descriptor method index 23. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse isBalancerEnabled(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(23),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse.getDefaultInstance());
      }
58617 
58618 
      /** Blocking Normalize call; dispatches service-descriptor method index 24. */
      public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse normalize(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(24),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse.getDefaultInstance());
      }
58629 
58630 
            // Blocking stub for the "setNormalizerRunning" RPC: dispatches MasterService
            // descriptor method #25 over the channel and casts the reply (generated code).
58631       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse setNormalizerRunning(
58632           com.google.protobuf.RpcController controller,
58633           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest request)
58634           throws com.google.protobuf.ServiceException {
58635         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse) channel.callBlockingMethod(
58636           getDescriptor().getMethods().get(25),
58637           controller,
58638           request,
58639           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse.getDefaultInstance());
58640       }
58641 
58642 
            // Blocking stub for the "isNormalizerEnabled" RPC: dispatches MasterService
            // descriptor method #26 over the channel and casts the reply (generated code).
58643       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse isNormalizerEnabled(
58644           com.google.protobuf.RpcController controller,
58645           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest request)
58646           throws com.google.protobuf.ServiceException {
58647         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse) channel.callBlockingMethod(
58648           getDescriptor().getMethods().get(26),
58649           controller,
58650           request,
58651           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse.getDefaultInstance());
58652       }
58653 
58654 
            // Blocking stub for the "runCatalogScan" RPC: dispatches MasterService
            // descriptor method #27 over the channel and casts the reply (generated code).
58655       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse runCatalogScan(
58656           com.google.protobuf.RpcController controller,
58657           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanRequest request)
58658           throws com.google.protobuf.ServiceException {
58659         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse) channel.callBlockingMethod(
58660           getDescriptor().getMethods().get(27),
58661           controller,
58662           request,
58663           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RunCatalogScanResponse.getDefaultInstance());
58664       }
58665 
58666 
            // Blocking stub for the "enableCatalogJanitor" RPC: dispatches MasterService
            // descriptor method #28 over the channel and casts the reply (generated code).
58667       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse enableCatalogJanitor(
58668           com.google.protobuf.RpcController controller,
58669           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorRequest request)
58670           throws com.google.protobuf.ServiceException {
58671         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse) channel.callBlockingMethod(
58672           getDescriptor().getMethods().get(28),
58673           controller,
58674           request,
58675           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableCatalogJanitorResponse.getDefaultInstance());
58676       }
58677 
58678 
            // Blocking stub for the "isCatalogJanitorEnabled" RPC: dispatches MasterService
            // descriptor method #29 over the channel and casts the reply (generated code).
58679       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse isCatalogJanitorEnabled(
58680           com.google.protobuf.RpcController controller,
58681           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledRequest request)
58682           throws com.google.protobuf.ServiceException {
58683         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse) channel.callBlockingMethod(
58684           getDescriptor().getMethods().get(29),
58685           controller,
58686           request,
58687           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsCatalogJanitorEnabledResponse.getDefaultInstance());
58688       }
58689 
58690 
            // Blocking stub for the "execMasterService" coprocessor RPC: dispatches
            // MasterService descriptor method #30 over the channel; note the request and
            // response types come from ClientProtos, unlike the other stubs here.
58691       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execMasterService(
58692           com.google.protobuf.RpcController controller,
58693           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
58694           throws com.google.protobuf.ServiceException {
58695         return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
58696           getDescriptor().getMethods().get(30),
58697           controller,
58698           request,
58699           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
58700       }
58701 
58702 
            // Blocking stub for the "snapshot" RPC: dispatches MasterService
            // descriptor method #31 over the channel and casts the reply (generated code).
58703       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse snapshot(
58704           com.google.protobuf.RpcController controller,
58705           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest request)
58706           throws com.google.protobuf.ServiceException {
58707         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse) channel.callBlockingMethod(
58708           getDescriptor().getMethods().get(31),
58709           controller,
58710           request,
58711           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse.getDefaultInstance());
58712       }
58713 
58714 
            // Blocking stub for the "getCompletedSnapshots" RPC: dispatches MasterService
            // descriptor method #32 over the channel and casts the reply (generated code).
58715       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse getCompletedSnapshots(
58716           com.google.protobuf.RpcController controller,
58717           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest request)
58718           throws com.google.protobuf.ServiceException {
58719         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse) channel.callBlockingMethod(
58720           getDescriptor().getMethods().get(32),
58721           controller,
58722           request,
58723           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsResponse.getDefaultInstance());
58724       }
58725 
58726 
            // Blocking stub for the "deleteSnapshot" RPC: dispatches MasterService
            // descriptor method #33 over the channel and casts the reply (generated code).
58727       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse deleteSnapshot(
58728           com.google.protobuf.RpcController controller,
58729           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest request)
58730           throws com.google.protobuf.ServiceException {
58731         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse) channel.callBlockingMethod(
58732           getDescriptor().getMethods().get(33),
58733           controller,
58734           request,
58735           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotResponse.getDefaultInstance());
58736       }
58737 
58738 
            // Blocking stub for the "isSnapshotDone" RPC: dispatches MasterService
            // descriptor method #34 over the channel and casts the reply (generated code).
58739       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse isSnapshotDone(
58740           com.google.protobuf.RpcController controller,
58741           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest request)
58742           throws com.google.protobuf.ServiceException {
58743         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse) channel.callBlockingMethod(
58744           getDescriptor().getMethods().get(34),
58745           controller,
58746           request,
58747           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse.getDefaultInstance());
58748       }
58749 
58750 
            // Blocking stub for the "restoreSnapshot" RPC: dispatches MasterService
            // descriptor method #35 over the channel and casts the reply (generated code).
58751       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse restoreSnapshot(
58752           com.google.protobuf.RpcController controller,
58753           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest request)
58754           throws com.google.protobuf.ServiceException {
58755         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse) channel.callBlockingMethod(
58756           getDescriptor().getMethods().get(35),
58757           controller,
58758           request,
58759           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse.getDefaultInstance());
58760       }
58761 
58762 
            // Blocking stub for the "isRestoreSnapshotDone" RPC: dispatches MasterService
            // descriptor method #36 over the channel and casts the reply (generated code).
58763       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse isRestoreSnapshotDone(
58764           com.google.protobuf.RpcController controller,
58765           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneRequest request)
58766           throws com.google.protobuf.ServiceException {
58767         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse) channel.callBlockingMethod(
58768           getDescriptor().getMethods().get(36),
58769           controller,
58770           request,
58771           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsRestoreSnapshotDoneResponse.getDefaultInstance());
58772       }
58773 
58774 
            // Blocking stub for the "execProcedure" RPC: dispatches MasterService
            // descriptor method #37 over the channel and casts the reply (generated code).
58775       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedure(
58776           com.google.protobuf.RpcController controller,
58777           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
58778           throws com.google.protobuf.ServiceException {
58779         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod(
58780           getDescriptor().getMethods().get(37),
58781           controller,
58782           request,
58783           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance());
58784       }
58785 
58786 
            // Blocking stub for the "execProcedureWithRet" RPC: dispatches MasterService
            // descriptor method #38. Shares the ExecProcedureRequest/Response message
            // types with execProcedure but targets a distinct service method.
58787       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse execProcedureWithRet(
58788           com.google.protobuf.RpcController controller,
58789           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest request)
58790           throws com.google.protobuf.ServiceException {
58791         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse) channel.callBlockingMethod(
58792           getDescriptor().getMethods().get(38),
58793           controller,
58794           request,
58795           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse.getDefaultInstance());
58796       }
58797 
58798 
            // Blocking stub for the "isProcedureDone" RPC: dispatches MasterService
            // descriptor method #39 over the channel and casts the reply (generated code).
58799       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse isProcedureDone(
58800           com.google.protobuf.RpcController controller,
58801           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest request)
58802           throws com.google.protobuf.ServiceException {
58803         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse) channel.callBlockingMethod(
58804           getDescriptor().getMethods().get(39),
58805           controller,
58806           request,
58807           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse.getDefaultInstance());
58808       }
58809 
58810 
            // Blocking stub for the "modifyNamespace" RPC: dispatches MasterService
            // descriptor method #40 over the channel and casts the reply (generated code).
58811       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse modifyNamespace(
58812           com.google.protobuf.RpcController controller,
58813           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest request)
58814           throws com.google.protobuf.ServiceException {
58815         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse) channel.callBlockingMethod(
58816           getDescriptor().getMethods().get(40),
58817           controller,
58818           request,
58819           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse.getDefaultInstance());
58820       }
58821 
58822 
            // Blocking stub for the "createNamespace" RPC: dispatches MasterService
            // descriptor method #41 over the channel and casts the reply (generated code).
58823       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse createNamespace(
58824           com.google.protobuf.RpcController controller,
58825           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest request)
58826           throws com.google.protobuf.ServiceException {
58827         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse) channel.callBlockingMethod(
58828           getDescriptor().getMethods().get(41),
58829           controller,
58830           request,
58831           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse.getDefaultInstance());
58832       }
58833 
58834 
            // Blocking stub for the "deleteNamespace" RPC: dispatches MasterService
            // descriptor method #42 over the channel and casts the reply (generated code).
58835       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse deleteNamespace(
58836           com.google.protobuf.RpcController controller,
58837           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest request)
58838           throws com.google.protobuf.ServiceException {
58839         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse) channel.callBlockingMethod(
58840           getDescriptor().getMethods().get(42),
58841           controller,
58842           request,
58843           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse.getDefaultInstance());
58844       }
58845 
58846 
            // Blocking stub for the "getNamespaceDescriptor" RPC: dispatches MasterService
            // descriptor method #43 over the channel and casts the reply (generated code).
58847       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse getNamespaceDescriptor(
58848           com.google.protobuf.RpcController controller,
58849           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest request)
58850           throws com.google.protobuf.ServiceException {
58851         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse) channel.callBlockingMethod(
58852           getDescriptor().getMethods().get(43),
58853           controller,
58854           request,
58855           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorResponse.getDefaultInstance());
58856       }
58857 
58858 
            // Blocking stub for the "listNamespaceDescriptors" RPC: dispatches MasterService
            // descriptor method #44 over the channel and casts the reply (generated code).
58859       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse listNamespaceDescriptors(
58860           com.google.protobuf.RpcController controller,
58861           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest request)
58862           throws com.google.protobuf.ServiceException {
58863         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse) channel.callBlockingMethod(
58864           getDescriptor().getMethods().get(44),
58865           controller,
58866           request,
58867           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsResponse.getDefaultInstance());
58868       }
58869 
58870 
            // Blocking stub for the "listTableDescriptorsByNamespace" RPC: dispatches
            // MasterService descriptor method #45 over the channel and casts the reply.
58871       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse listTableDescriptorsByNamespace(
58872           com.google.protobuf.RpcController controller,
58873           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest request)
58874           throws com.google.protobuf.ServiceException {
58875         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse) channel.callBlockingMethod(
58876           getDescriptor().getMethods().get(45),
58877           controller,
58878           request,
58879           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceResponse.getDefaultInstance());
58880       }
58881 
58882 
            // Blocking stub for the "listTableNamesByNamespace" RPC: dispatches
            // MasterService descriptor method #46 over the channel and casts the reply.
58883       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse listTableNamesByNamespace(
58884           com.google.protobuf.RpcController controller,
58885           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest request)
58886           throws com.google.protobuf.ServiceException {
58887         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse) channel.callBlockingMethod(
58888           getDescriptor().getMethods().get(46),
58889           controller,
58890           request,
58891           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceResponse.getDefaultInstance());
58892       }
58893 
58894 
            // Blocking stub for the "setQuota" RPC: dispatches MasterService
            // descriptor method #47 over the channel and casts the reply (generated code).
58895       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse setQuota(
58896           com.google.protobuf.RpcController controller,
58897           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaRequest request)
58898           throws com.google.protobuf.ServiceException {
58899         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse) channel.callBlockingMethod(
58900           getDescriptor().getMethods().get(47),
58901           controller,
58902           request,
58903           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetQuotaResponse.getDefaultInstance());
58904       }
58905 
58906 
            // Blocking stub for the "getLastMajorCompactionTimestamp" RPC: dispatches
            // MasterService descriptor method #48 over the channel and casts the reply.
58907       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getLastMajorCompactionTimestamp(
58908           com.google.protobuf.RpcController controller,
58909           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest request)
58910           throws com.google.protobuf.ServiceException {
58911         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod(
58912           getDescriptor().getMethods().get(48),
58913           controller,
58914           request,
58915           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance());
58916       }
58917 
58918 
            // Blocking stub for the "getLastMajorCompactionTimestampForRegion" RPC:
            // dispatches MasterService descriptor method #49. Per-region variant; takes a
            // ...ForRegionRequest but reuses MajorCompactionTimestampResponse for the reply.
58919       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse getLastMajorCompactionTimestampForRegion(
58920           com.google.protobuf.RpcController controller,
58921           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest request)
58922           throws com.google.protobuf.ServiceException {
58923         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse) channel.callBlockingMethod(
58924           getDescriptor().getMethods().get(49),
58925           controller,
58926           request,
58927           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampResponse.getDefaultInstance());
58928       }
58929 
58930 
            // Blocking stub for the "getProcedureResult" RPC: dispatches MasterService
            // descriptor method #50 over the channel and casts the reply (generated code).
58931       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse getProcedureResult(
58932           com.google.protobuf.RpcController controller,
58933           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest request)
58934           throws com.google.protobuf.ServiceException {
58935         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse) channel.callBlockingMethod(
58936           getDescriptor().getMethods().get(50),
58937           controller,
58938           request,
58939           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse.getDefaultInstance());
58940       }
58941 
58942 
            // Blocking stub for the "getSecurityCapabilities" RPC: dispatches MasterService
            // descriptor method #51 over the channel and casts the reply (generated code).
58943       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse getSecurityCapabilities(
58944           com.google.protobuf.RpcController controller,
58945           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest request)
58946           throws com.google.protobuf.ServiceException {
58947         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse) channel.callBlockingMethod(
58948           getDescriptor().getMethods().get(51),
58949           controller,
58950           request,
58951           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse.getDefaultInstance());
58952       }
58953 
58954 
            // Blocking stub for the "abortProcedure" RPC: dispatches MasterService
            // descriptor method #52 over the channel and casts the reply (generated code).
58955       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse abortProcedure(
58956           com.google.protobuf.RpcController controller,
58957           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest request)
58958           throws com.google.protobuf.ServiceException {
58959         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse) channel.callBlockingMethod(
58960           getDescriptor().getMethods().get(52),
58961           controller,
58962           request,
58963           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse.getDefaultInstance());
58964       }
58965 
58966 
            // Blocking stub for the "listProcedures" RPC: dispatches MasterService
            // descriptor method #53 over the channel and casts the reply (generated code).
58967       public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse listProcedures(
58968           com.google.protobuf.RpcController controller,
58969           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest request)
58970           throws com.google.protobuf.ServiceException {
58971         return (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse) channel.callBlockingMethod(
58972           getDescriptor().getMethods().get(53),
58973           controller,
58974           request,
58975           org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresResponse.getDefaultInstance());
58976       }
58977 
58978     }
58979 
58980     // @@protoc_insertion_point(class_scope:MasterService)
58981   }
58982 
58983   private static com.google.protobuf.Descriptors.Descriptor
58984     internal_static_AddColumnRequest_descriptor;
58985   private static
58986     com.google.protobuf.GeneratedMessage.FieldAccessorTable
58987       internal_static_AddColumnRequest_fieldAccessorTable;
58988   private static com.google.protobuf.Descriptors.Descriptor
58989     internal_static_AddColumnResponse_descriptor;
58990   private static
58991     com.google.protobuf.GeneratedMessage.FieldAccessorTable
58992       internal_static_AddColumnResponse_fieldAccessorTable;
58993   private static com.google.protobuf.Descriptors.Descriptor
58994     internal_static_DeleteColumnRequest_descriptor;
58995   private static
58996     com.google.protobuf.GeneratedMessage.FieldAccessorTable
58997       internal_static_DeleteColumnRequest_fieldAccessorTable;
58998   private static com.google.protobuf.Descriptors.Descriptor
58999     internal_static_DeleteColumnResponse_descriptor;
59000   private static
59001     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59002       internal_static_DeleteColumnResponse_fieldAccessorTable;
59003   private static com.google.protobuf.Descriptors.Descriptor
59004     internal_static_ModifyColumnRequest_descriptor;
59005   private static
59006     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59007       internal_static_ModifyColumnRequest_fieldAccessorTable;
59008   private static com.google.protobuf.Descriptors.Descriptor
59009     internal_static_ModifyColumnResponse_descriptor;
59010   private static
59011     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59012       internal_static_ModifyColumnResponse_fieldAccessorTable;
59013   private static com.google.protobuf.Descriptors.Descriptor
59014     internal_static_MoveRegionRequest_descriptor;
59015   private static
59016     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59017       internal_static_MoveRegionRequest_fieldAccessorTable;
59018   private static com.google.protobuf.Descriptors.Descriptor
59019     internal_static_MoveRegionResponse_descriptor;
59020   private static
59021     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59022       internal_static_MoveRegionResponse_fieldAccessorTable;
59023   private static com.google.protobuf.Descriptors.Descriptor
59024     internal_static_DispatchMergingRegionsRequest_descriptor;
59025   private static
59026     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59027       internal_static_DispatchMergingRegionsRequest_fieldAccessorTable;
59028   private static com.google.protobuf.Descriptors.Descriptor
59029     internal_static_DispatchMergingRegionsResponse_descriptor;
59030   private static
59031     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59032       internal_static_DispatchMergingRegionsResponse_fieldAccessorTable;
59033   private static com.google.protobuf.Descriptors.Descriptor
59034     internal_static_AssignRegionRequest_descriptor;
59035   private static
59036     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59037       internal_static_AssignRegionRequest_fieldAccessorTable;
59038   private static com.google.protobuf.Descriptors.Descriptor
59039     internal_static_AssignRegionResponse_descriptor;
59040   private static
59041     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59042       internal_static_AssignRegionResponse_fieldAccessorTable;
59043   private static com.google.protobuf.Descriptors.Descriptor
59044     internal_static_UnassignRegionRequest_descriptor;
59045   private static
59046     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59047       internal_static_UnassignRegionRequest_fieldAccessorTable;
59048   private static com.google.protobuf.Descriptors.Descriptor
59049     internal_static_UnassignRegionResponse_descriptor;
59050   private static
59051     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59052       internal_static_UnassignRegionResponse_fieldAccessorTable;
59053   private static com.google.protobuf.Descriptors.Descriptor
59054     internal_static_OfflineRegionRequest_descriptor;
59055   private static
59056     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59057       internal_static_OfflineRegionRequest_fieldAccessorTable;
59058   private static com.google.protobuf.Descriptors.Descriptor
59059     internal_static_OfflineRegionResponse_descriptor;
59060   private static
59061     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59062       internal_static_OfflineRegionResponse_fieldAccessorTable;
59063   private static com.google.protobuf.Descriptors.Descriptor
59064     internal_static_CreateTableRequest_descriptor;
59065   private static
59066     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59067       internal_static_CreateTableRequest_fieldAccessorTable;
59068   private static com.google.protobuf.Descriptors.Descriptor
59069     internal_static_CreateTableResponse_descriptor;
59070   private static
59071     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59072       internal_static_CreateTableResponse_fieldAccessorTable;
59073   private static com.google.protobuf.Descriptors.Descriptor
59074     internal_static_DeleteTableRequest_descriptor;
59075   private static
59076     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59077       internal_static_DeleteTableRequest_fieldAccessorTable;
59078   private static com.google.protobuf.Descriptors.Descriptor
59079     internal_static_DeleteTableResponse_descriptor;
59080   private static
59081     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59082       internal_static_DeleteTableResponse_fieldAccessorTable;
59083   private static com.google.protobuf.Descriptors.Descriptor
59084     internal_static_TruncateTableRequest_descriptor;
59085   private static
59086     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59087       internal_static_TruncateTableRequest_fieldAccessorTable;
59088   private static com.google.protobuf.Descriptors.Descriptor
59089     internal_static_TruncateTableResponse_descriptor;
59090   private static
59091     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59092       internal_static_TruncateTableResponse_fieldAccessorTable;
59093   private static com.google.protobuf.Descriptors.Descriptor
59094     internal_static_EnableTableRequest_descriptor;
59095   private static
59096     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59097       internal_static_EnableTableRequest_fieldAccessorTable;
59098   private static com.google.protobuf.Descriptors.Descriptor
59099     internal_static_EnableTableResponse_descriptor;
59100   private static
59101     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59102       internal_static_EnableTableResponse_fieldAccessorTable;
59103   private static com.google.protobuf.Descriptors.Descriptor
59104     internal_static_DisableTableRequest_descriptor;
59105   private static
59106     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59107       internal_static_DisableTableRequest_fieldAccessorTable;
59108   private static com.google.protobuf.Descriptors.Descriptor
59109     internal_static_DisableTableResponse_descriptor;
59110   private static
59111     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59112       internal_static_DisableTableResponse_fieldAccessorTable;
59113   private static com.google.protobuf.Descriptors.Descriptor
59114     internal_static_ModifyTableRequest_descriptor;
59115   private static
59116     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59117       internal_static_ModifyTableRequest_fieldAccessorTable;
59118   private static com.google.protobuf.Descriptors.Descriptor
59119     internal_static_ModifyTableResponse_descriptor;
59120   private static
59121     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59122       internal_static_ModifyTableResponse_fieldAccessorTable;
59123   private static com.google.protobuf.Descriptors.Descriptor
59124     internal_static_CreateNamespaceRequest_descriptor;
59125   private static
59126     com.google.protobuf.GeneratedMessage.FieldAccessorTable
59127       internal_static_CreateNamespaceRequest_fieldAccessorTable;
  // Generated descriptor plumbing: one Descriptors.Descriptor plus one
  // GeneratedMessage.FieldAccessorTable per message type declared in
  // Master.proto. These fields are presumably assigned once the file
  // descriptor is parsed in the static initializer below (assignment not
  // visible in this chunk — standard protoc-generated pattern). The exact
  // field names are referenced reflectively by the generated message
  // classes; do not rename or edit by hand.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CreateNamespaceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CreateNamespaceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DeleteNamespaceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DeleteNamespaceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DeleteNamespaceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DeleteNamespaceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ModifyNamespaceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ModifyNamespaceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ModifyNamespaceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ModifyNamespaceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetNamespaceDescriptorRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetNamespaceDescriptorResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListNamespaceDescriptorsRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListNamespaceDescriptorsResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListTableDescriptorsByNamespaceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListTableDescriptorsByNamespaceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListTableNamesByNamespaceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListTableNamesByNamespaceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ShutdownRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ShutdownRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ShutdownResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ShutdownResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_StopMasterRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_StopMasterRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_StopMasterResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_StopMasterResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BalanceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BalanceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BalanceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BalanceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SetBalancerRunningRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SetBalancerRunningRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SetBalancerRunningResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SetBalancerRunningResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsBalancerEnabledRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsBalancerEnabledRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsBalancerEnabledResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsBalancerEnabledResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_NormalizeRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_NormalizeRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_NormalizeResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_NormalizeResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SetNormalizerRunningRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SetNormalizerRunningRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SetNormalizerRunningResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SetNormalizerRunningResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsNormalizerEnabledRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsNormalizerEnabledRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsNormalizerEnabledResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsNormalizerEnabledResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RunCatalogScanRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RunCatalogScanRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RunCatalogScanResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RunCatalogScanResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_EnableCatalogJanitorRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_EnableCatalogJanitorRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_EnableCatalogJanitorResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_EnableCatalogJanitorResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsCatalogJanitorEnabledRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsCatalogJanitorEnabledResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SnapshotRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SnapshotRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SnapshotResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SnapshotResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetCompletedSnapshotsRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetCompletedSnapshotsResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DeleteSnapshotRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DeleteSnapshotRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_DeleteSnapshotResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_DeleteSnapshotResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RestoreSnapshotRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RestoreSnapshotRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RestoreSnapshotResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RestoreSnapshotResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsSnapshotDoneRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsSnapshotDoneRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsSnapshotDoneResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsSnapshotDoneResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsRestoreSnapshotDoneRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsRestoreSnapshotDoneResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetSchemaAlterStatusRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetSchemaAlterStatusResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetTableDescriptorsRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetTableDescriptorsRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetTableDescriptorsResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetTableDescriptorsResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetTableNamesRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetTableNamesRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetTableNamesResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetTableNamesResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetClusterStatusRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetClusterStatusRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetClusterStatusResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetClusterStatusResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsMasterRunningRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsMasterRunningRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsMasterRunningResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsMasterRunningResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ExecProcedureRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ExecProcedureRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ExecProcedureResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ExecProcedureResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsProcedureDoneRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsProcedureDoneRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IsProcedureDoneResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IsProcedureDoneResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetProcedureResultRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetProcedureResultRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetProcedureResultResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetProcedureResultResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_AbortProcedureRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_AbortProcedureRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_AbortProcedureResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_AbortProcedureResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListProceduresRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListProceduresRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ListProceduresResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ListProceduresResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SetQuotaRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SetQuotaRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SetQuotaResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SetQuotaResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MajorCompactionTimestampRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MajorCompactionTimestampRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MajorCompactionTimestampForRegionRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MajorCompactionTimestampForRegionRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MajorCompactionTimestampResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MajorCompactionTimestampResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SecurityCapabilitiesRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SecurityCapabilitiesRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SecurityCapabilitiesResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SecurityCapabilitiesResponse_fieldAccessorTable;
59498 
  /**
   * Returns the {@link com.google.protobuf.Descriptors.FileDescriptor} that
   * describes {@code Master.proto} and all of its message and service types.
   *
   * @return the file descriptor for {@code Master.proto}
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Parsed file descriptor for Master.proto; presumably built from the
  // serialized descriptorData in the static initializer below (the
  // assignment itself is not visible in this chunk).
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
59505   static {
59506     java.lang.String[] descriptorData = {
59507       "\n\014Master.proto\032\013HBase.proto\032\014Client.prot" +
59508       "o\032\023ClusterStatus.proto\032\023ErrorHandling.pr" +
59509       "oto\032\017Procedure.proto\032\013Quota.proto\"\212\001\n\020Ad" +
59510       "dColumnRequest\022\036\n\ntable_name\030\001 \002(\0132\n.Tab" +
59511       "leName\022,\n\017column_families\030\002 \002(\0132\023.Column" +
59512       "FamilySchema\022\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005" +
59513       "nonce\030\004 \001(\004:\0010\"\023\n\021AddColumnResponse\"t\n\023D" +
59514       "eleteColumnRequest\022\036\n\ntable_name\030\001 \002(\0132\n" +
59515       ".TableName\022\023\n\013column_name\030\002 \002(\014\022\026\n\013nonce" +
59516       "_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\026\n\024Del",
59517       "eteColumnResponse\"\215\001\n\023ModifyColumnReques" +
59518       "t\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022,\n\017col" +
59519       "umn_families\030\002 \002(\0132\023.ColumnFamilySchema\022" +
59520       "\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\001" +
59521       "0\"\026\n\024ModifyColumnResponse\"\\\n\021MoveRegionR" +
59522       "equest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier" +
59523       "\022%\n\020dest_server_name\030\002 \001(\0132\013.ServerName\"" +
59524       "\024\n\022MoveRegionResponse\"\200\001\n\035DispatchMergin" +
59525       "gRegionsRequest\022\"\n\010region_a\030\001 \002(\0132\020.Regi" +
59526       "onSpecifier\022\"\n\010region_b\030\002 \002(\0132\020.RegionSp",
59527       "ecifier\022\027\n\010forcible\030\003 \001(\010:\005false\" \n\036Disp" +
59528       "atchMergingRegionsResponse\"7\n\023AssignRegi" +
59529       "onRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecif" +
59530       "ier\"\026\n\024AssignRegionResponse\"O\n\025UnassignR" +
59531       "egionRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpe" +
59532       "cifier\022\024\n\005force\030\002 \001(\010:\005false\"\030\n\026Unassign" +
59533       "RegionResponse\"8\n\024OfflineRegionRequest\022 " +
59534       "\n\006region\030\001 \002(\0132\020.RegionSpecifier\"\027\n\025Offl" +
59535       "ineRegionResponse\"v\n\022CreateTableRequest\022" +
59536       "\"\n\014table_schema\030\001 \002(\0132\014.TableSchema\022\022\n\ns",
59537       "plit_keys\030\002 \003(\014\022\026\n\013nonce_group\030\003 \001(\004:\0010\022" +
59538       "\020\n\005nonce\030\004 \001(\004:\0010\"&\n\023CreateTableResponse" +
59539       "\022\017\n\007proc_id\030\001 \001(\004\"^\n\022DeleteTableRequest\022" +
59540       "\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\026\n\013nonce" +
59541       "_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:\0010\"&\n\023Del" +
59542       "eteTableResponse\022\017\n\007proc_id\030\001 \001(\004\"~\n\024Tru" +
59543       "ncateTableRequest\022\035\n\ttableName\030\001 \002(\0132\n.T" +
59544       "ableName\022\035\n\016preserveSplits\030\002 \001(\010:\005false\022" +
59545       "\026\n\013nonce_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\001" +
59546       "0\"\027\n\025TruncateTableResponse\"^\n\022EnableTabl",
59547       "eRequest\022\036\n\ntable_name\030\001 \002(\0132\n.TableName" +
59548       "\022\026\n\013nonce_group\030\002 \001(\004:\0010\022\020\n\005nonce\030\003 \001(\004:" +
59549       "\0010\"&\n\023EnableTableResponse\022\017\n\007proc_id\030\001 \001" +
59550       "(\004\"_\n\023DisableTableRequest\022\036\n\ntable_name\030" +
59551       "\001 \002(\0132\n.TableName\022\026\n\013nonce_group\030\002 \001(\004:\001" +
59552       "0\022\020\n\005nonce\030\003 \001(\004:\0010\"\'\n\024DisableTableRespo" +
59553       "nse\022\017\n\007proc_id\030\001 \001(\004\"\202\001\n\022ModifyTableRequ" +
59554       "est\022\036\n\ntable_name\030\001 \002(\0132\n.TableName\022\"\n\014t" +
59555       "able_schema\030\002 \002(\0132\014.TableSchema\022\026\n\013nonce" +
59556       "_group\030\003 \001(\004:\0010\022\020\n\005nonce\030\004 \001(\004:\0010\"\025\n\023Mod",
59557       "ifyTableResponse\"K\n\026CreateNamespaceReque" +
59558       "st\0221\n\023namespaceDescriptor\030\001 \002(\0132\024.Namesp" +
59559       "aceDescriptor\"\031\n\027CreateNamespaceResponse" +
59560       "\"/\n\026DeleteNamespaceRequest\022\025\n\rnamespaceN" +
59561       "ame\030\001 \002(\t\"\031\n\027DeleteNamespaceResponse\"K\n\026" +
59562       "ModifyNamespaceRequest\0221\n\023namespaceDescr" +
59563       "iptor\030\001 \002(\0132\024.NamespaceDescriptor\"\031\n\027Mod" +
59564       "ifyNamespaceResponse\"6\n\035GetNamespaceDesc" +
59565       "riptorRequest\022\025\n\rnamespaceName\030\001 \002(\t\"S\n\036" +
59566       "GetNamespaceDescriptorResponse\0221\n\023namesp",
59567       "aceDescriptor\030\001 \002(\0132\024.NamespaceDescripto" +
59568       "r\"!\n\037ListNamespaceDescriptorsRequest\"U\n " +
59569       "ListNamespaceDescriptorsResponse\0221\n\023name" +
59570       "spaceDescriptor\030\001 \003(\0132\024.NamespaceDescrip" +
59571       "tor\"?\n&ListTableDescriptorsByNamespaceRe" +
59572       "quest\022\025\n\rnamespaceName\030\001 \002(\t\"L\n\'ListTabl" +
59573       "eDescriptorsByNamespaceResponse\022!\n\013table" +
59574       "Schema\030\001 \003(\0132\014.TableSchema\"9\n ListTableN" +
59575       "amesByNamespaceRequest\022\025\n\rnamespaceName\030" +
59576       "\001 \002(\t\"B\n!ListTableNamesByNamespaceRespon",
59577       "se\022\035\n\ttableName\030\001 \003(\0132\n.TableName\"\021\n\017Shu" +
59578       "tdownRequest\"\022\n\020ShutdownResponse\"\023\n\021Stop" +
59579       "MasterRequest\"\024\n\022StopMasterResponse\"\020\n\016B" +
59580       "alanceRequest\"\'\n\017BalanceResponse\022\024\n\014bala" +
59581       "ncer_ran\030\001 \002(\010\"<\n\031SetBalancerRunningRequ" +
59582       "est\022\n\n\002on\030\001 \002(\010\022\023\n\013synchronous\030\002 \001(\010\"8\n\032" +
59583       "SetBalancerRunningResponse\022\032\n\022prev_balan" +
59584       "ce_value\030\001 \001(\010\"\032\n\030IsBalancerEnabledReque" +
59585       "st\",\n\031IsBalancerEnabledResponse\022\017\n\007enabl" +
59586       "ed\030\001 \002(\010\"\022\n\020NormalizeRequest\"+\n\021Normaliz",
59587       "eResponse\022\026\n\016normalizer_ran\030\001 \002(\010\")\n\033Set" +
59588       "NormalizerRunningRequest\022\n\n\002on\030\001 \002(\010\"=\n\034" +
59589       "SetNormalizerRunningResponse\022\035\n\025prev_nor" +
59590       "malizer_value\030\001 \001(\010\"\034\n\032IsNormalizerEnabl" +
59591       "edRequest\".\n\033IsNormalizerEnabledResponse" +
59592       "\022\017\n\007enabled\030\001 \002(\010\"\027\n\025RunCatalogScanReque" +
59593       "st\"-\n\026RunCatalogScanResponse\022\023\n\013scan_res" +
59594       "ult\030\001 \001(\005\"-\n\033EnableCatalogJanitorRequest" +
59595       "\022\016\n\006enable\030\001 \002(\010\"2\n\034EnableCatalogJanitor" +
59596       "Response\022\022\n\nprev_value\030\001 \001(\010\" \n\036IsCatalo",
59597       "gJanitorEnabledRequest\"0\n\037IsCatalogJanit" +
59598       "orEnabledResponse\022\r\n\005value\030\001 \002(\010\"9\n\017Snap" +
59599       "shotRequest\022&\n\010snapshot\030\001 \002(\0132\024.Snapshot" +
59600       "Description\",\n\020SnapshotResponse\022\030\n\020expec" +
59601       "ted_timeout\030\001 \002(\003\"\036\n\034GetCompletedSnapsho" +
59602       "tsRequest\"H\n\035GetCompletedSnapshotsRespon" +
59603       "se\022\'\n\tsnapshots\030\001 \003(\0132\024.SnapshotDescript" +
59604       "ion\"?\n\025DeleteSnapshotRequest\022&\n\010snapshot" +
59605       "\030\001 \002(\0132\024.SnapshotDescription\"\030\n\026DeleteSn" +
59606       "apshotResponse\"@\n\026RestoreSnapshotRequest",
59607       "\022&\n\010snapshot\030\001 \002(\0132\024.SnapshotDescription" +
59608       "\"\031\n\027RestoreSnapshotResponse\"?\n\025IsSnapsho" +
59609       "tDoneRequest\022&\n\010snapshot\030\001 \001(\0132\024.Snapsho" +
59610       "tDescription\"U\n\026IsSnapshotDoneResponse\022\023" +
59611       "\n\004done\030\001 \001(\010:\005false\022&\n\010snapshot\030\002 \001(\0132\024." +
59612       "SnapshotDescription\"F\n\034IsRestoreSnapshot" +
59613       "DoneRequest\022&\n\010snapshot\030\001 \001(\0132\024.Snapshot" +
59614       "Description\"4\n\035IsRestoreSnapshotDoneResp" +
59615       "onse\022\023\n\004done\030\001 \001(\010:\005false\"=\n\033GetSchemaAl" +
59616       "terStatusRequest\022\036\n\ntable_name\030\001 \002(\0132\n.T",
59617       "ableName\"T\n\034GetSchemaAlterStatusResponse" +
59618       "\022\035\n\025yet_to_update_regions\030\001 \001(\r\022\025\n\rtotal" +
59619       "_regions\030\002 \001(\r\"\202\001\n\032GetTableDescriptorsRe" +
59620       "quest\022\037\n\013table_names\030\001 \003(\0132\n.TableName\022\r" +
59621       "\n\005regex\030\002 \001(\t\022!\n\022include_sys_tables\030\003 \001(" +
59622       "\010:\005false\022\021\n\tnamespace\030\004 \001(\t\"A\n\033GetTableD" +
59623       "escriptorsResponse\022\"\n\014table_schema\030\001 \003(\013" +
59624       "2\014.TableSchema\"[\n\024GetTableNamesRequest\022\r" +
59625       "\n\005regex\030\001 \001(\t\022!\n\022include_sys_tables\030\002 \001(" +
59626       "\010:\005false\022\021\n\tnamespace\030\003 \001(\t\"8\n\025GetTableN",
59627       "amesResponse\022\037\n\013table_names\030\001 \003(\0132\n.Tabl" +
59628       "eName\"\031\n\027GetClusterStatusRequest\"B\n\030GetC" +
59629       "lusterStatusResponse\022&\n\016cluster_status\030\001" +
59630       " \002(\0132\016.ClusterStatus\"\030\n\026IsMasterRunningR" +
59631       "equest\"4\n\027IsMasterRunningResponse\022\031\n\021is_" +
59632       "master_running\030\001 \002(\010\"@\n\024ExecProcedureReq" +
59633       "uest\022(\n\tprocedure\030\001 \002(\0132\025.ProcedureDescr" +
59634       "iption\"F\n\025ExecProcedureResponse\022\030\n\020expec" +
59635       "ted_timeout\030\001 \001(\003\022\023\n\013return_data\030\002 \001(\014\"B" +
59636       "\n\026IsProcedureDoneRequest\022(\n\tprocedure\030\001 ",
59637       "\001(\0132\025.ProcedureDescription\"W\n\027IsProcedur" +
59638       "eDoneResponse\022\023\n\004done\030\001 \001(\010:\005false\022\'\n\010sn" +
59639       "apshot\030\002 \001(\0132\025.ProcedureDescription\",\n\031G" +
59640       "etProcedureResultRequest\022\017\n\007proc_id\030\001 \002(" +
59641       "\004\"\347\001\n\032GetProcedureResultResponse\0220\n\005stat" +
59642       "e\030\001 \002(\0162!.GetProcedureResultResponse.Sta" +
59643       "te\022\022\n\nstart_time\030\002 \001(\004\022\023\n\013last_update\030\003 " +
59644       "\001(\004\022\016\n\006result\030\004 \001(\014\022+\n\texception\030\005 \001(\0132\030" +
59645       ".ForeignExceptionMessage\"1\n\005State\022\r\n\tNOT" +
59646       "_FOUND\020\000\022\013\n\007RUNNING\020\001\022\014\n\010FINISHED\020\002\"M\n\025A",
59647       "bortProcedureRequest\022\017\n\007proc_id\030\001 \002(\004\022#\n" +
59648       "\025mayInterruptIfRunning\030\002 \001(\010:\004true\"6\n\026Ab" +
59649       "ortProcedureResponse\022\034\n\024is_procedure_abo" +
59650       "rted\030\001 \002(\010\"\027\n\025ListProceduresRequest\"7\n\026L" +
59651       "istProceduresResponse\022\035\n\tprocedure\030\001 \003(\013" +
59652       "2\n.Procedure\"\273\001\n\017SetQuotaRequest\022\021\n\tuser" +
59653       "_name\030\001 \001(\t\022\022\n\nuser_group\030\002 \001(\t\022\021\n\tnames" +
59654       "pace\030\003 \001(\t\022\036\n\ntable_name\030\004 \001(\0132\n.TableNa" +
59655       "me\022\022\n\nremove_all\030\005 \001(\010\022\026\n\016bypass_globals" +
59656       "\030\006 \001(\010\022\"\n\010throttle\030\007 \001(\0132\020.ThrottleReque",
59657       "st\"\022\n\020SetQuotaResponse\"A\n\037MajorCompactio" +
59658       "nTimestampRequest\022\036\n\ntable_name\030\001 \002(\0132\n." +
59659       "TableName\"L\n(MajorCompactionTimestampFor" +
59660       "RegionRequest\022 \n\006region\030\001 \002(\0132\020.RegionSp" +
59661       "ecifier\"@\n MajorCompactionTimestampRespo" +
59662       "nse\022\034\n\024compaction_timestamp\030\001 \002(\003\"\035\n\033Sec" +
59663       "urityCapabilitiesRequest\"\343\001\n\034SecurityCap" +
59664       "abilitiesResponse\022>\n\014capabilities\030\001 \003(\0162" +
59665       "(.SecurityCapabilitiesResponse.Capabilit" +
59666       "y\"\202\001\n\nCapability\022\031\n\025SIMPLE_AUTHENTICATIO",
59667       "N\020\000\022\031\n\025SECURE_AUTHENTICATION\020\001\022\021\n\rAUTHOR" +
59668       "IZATION\020\002\022\026\n\022CELL_AUTHORIZATION\020\003\022\023\n\017CEL" +
59669       "L_VISIBILITY\020\0042\334\036\n\rMasterService\022S\n\024GetS" +
59670       "chemaAlterStatus\022\034.GetSchemaAlterStatusR" +
59671       "equest\032\035.GetSchemaAlterStatusResponse\022P\n" +
59672       "\023GetTableDescriptors\022\033.GetTableDescripto" +
59673       "rsRequest\032\034.GetTableDescriptorsResponse\022" +
59674       ">\n\rGetTableNames\022\025.GetTableNamesRequest\032" +
59675       "\026.GetTableNamesResponse\022G\n\020GetClusterSta" +
59676       "tus\022\030.GetClusterStatusRequest\032\031.GetClust",
59677       "erStatusResponse\022D\n\017IsMasterRunning\022\027.Is" +
59678       "MasterRunningRequest\032\030.IsMasterRunningRe" +
59679       "sponse\0222\n\tAddColumn\022\021.AddColumnRequest\032\022" +
59680       ".AddColumnResponse\022;\n\014DeleteColumn\022\024.Del" +
59681       "eteColumnRequest\032\025.DeleteColumnResponse\022" +
59682       ";\n\014ModifyColumn\022\024.ModifyColumnRequest\032\025." +
59683       "ModifyColumnResponse\0225\n\nMoveRegion\022\022.Mov" +
59684       "eRegionRequest\032\023.MoveRegionResponse\022Y\n\026D" +
59685       "ispatchMergingRegions\022\036.DispatchMergingR" +
59686       "egionsRequest\032\037.DispatchMergingRegionsRe",
59687       "sponse\022;\n\014AssignRegion\022\024.AssignRegionReq" +
59688       "uest\032\025.AssignRegionResponse\022A\n\016UnassignR" +
59689       "egion\022\026.UnassignRegionRequest\032\027.Unassign" +
59690       "RegionResponse\022>\n\rOfflineRegion\022\025.Offlin" +
59691       "eRegionRequest\032\026.OfflineRegionResponse\0228" +
59692       "\n\013DeleteTable\022\023.DeleteTableRequest\032\024.Del" +
59693       "eteTableResponse\022>\n\rtruncateTable\022\025.Trun" +
59694       "cateTableRequest\032\026.TruncateTableResponse" +
59695       "\0228\n\013EnableTable\022\023.EnableTableRequest\032\024.E" +
59696       "nableTableResponse\022;\n\014DisableTable\022\024.Dis",
59697       "ableTableRequest\032\025.DisableTableResponse\022" +
59698       "8\n\013ModifyTable\022\023.ModifyTableRequest\032\024.Mo" +
59699       "difyTableResponse\0228\n\013CreateTable\022\023.Creat" +
59700       "eTableRequest\032\024.CreateTableResponse\022/\n\010S" +
59701       "hutdown\022\020.ShutdownRequest\032\021.ShutdownResp" +
59702       "onse\0225\n\nStopMaster\022\022.StopMasterRequest\032\023" +
59703       ".StopMasterResponse\022,\n\007Balance\022\017.Balance" +
59704       "Request\032\020.BalanceResponse\022M\n\022SetBalancer" +
59705       "Running\022\032.SetBalancerRunningRequest\032\033.Se" +
59706       "tBalancerRunningResponse\022J\n\021IsBalancerEn",
59707       "abled\022\031.IsBalancerEnabledRequest\032\032.IsBal" +
59708       "ancerEnabledResponse\0222\n\tNormalize\022\021.Norm" +
59709       "alizeRequest\032\022.NormalizeResponse\022S\n\024SetN" +
59710       "ormalizerRunning\022\034.SetNormalizerRunningR" +
59711       "equest\032\035.SetNormalizerRunningResponse\022P\n" +
59712       "\023IsNormalizerEnabled\022\033.IsNormalizerEnabl" +
59713       "edRequest\032\034.IsNormalizerEnabledResponse\022" +
59714       "A\n\016RunCatalogScan\022\026.RunCatalogScanReques" +
59715       "t\032\027.RunCatalogScanResponse\022S\n\024EnableCata" +
59716       "logJanitor\022\034.EnableCatalogJanitorRequest",
59717       "\032\035.EnableCatalogJanitorResponse\022\\\n\027IsCat" +
59718       "alogJanitorEnabled\022\037.IsCatalogJanitorEna" +
59719       "bledRequest\032 .IsCatalogJanitorEnabledRes" +
59720       "ponse\022L\n\021ExecMasterService\022\032.Coprocessor" +
59721       "ServiceRequest\032\033.CoprocessorServiceRespo" +
59722       "nse\022/\n\010Snapshot\022\020.SnapshotRequest\032\021.Snap" +
59723       "shotResponse\022V\n\025GetCompletedSnapshots\022\035." +
59724       "GetCompletedSnapshotsRequest\032\036.GetComple" +
59725       "tedSnapshotsResponse\022A\n\016DeleteSnapshot\022\026" +
59726       ".DeleteSnapshotRequest\032\027.DeleteSnapshotR",
59727       "esponse\022A\n\016IsSnapshotDone\022\026.IsSnapshotDo" +
59728       "neRequest\032\027.IsSnapshotDoneResponse\022D\n\017Re" +
59729       "storeSnapshot\022\027.RestoreSnapshotRequest\032\030" +
59730       ".RestoreSnapshotResponse\022V\n\025IsRestoreSna" +
59731       "pshotDone\022\035.IsRestoreSnapshotDoneRequest" +
59732       "\032\036.IsRestoreSnapshotDoneResponse\022>\n\rExec" +
59733       "Procedure\022\025.ExecProcedureRequest\032\026.ExecP" +
59734       "rocedureResponse\022E\n\024ExecProcedureWithRet" +
59735       "\022\025.ExecProcedureRequest\032\026.ExecProcedureR" +
59736       "esponse\022D\n\017IsProcedureDone\022\027.IsProcedure",
59737       "DoneRequest\032\030.IsProcedureDoneResponse\022D\n" +
59738       "\017ModifyNamespace\022\027.ModifyNamespaceReques" +
59739       "t\032\030.ModifyNamespaceResponse\022D\n\017CreateNam" +
59740       "espace\022\027.CreateNamespaceRequest\032\030.Create" +
59741       "NamespaceResponse\022D\n\017DeleteNamespace\022\027.D" +
59742       "eleteNamespaceRequest\032\030.DeleteNamespaceR" +
59743       "esponse\022Y\n\026GetNamespaceDescriptor\022\036.GetN" +
59744       "amespaceDescriptorRequest\032\037.GetNamespace" +
59745       "DescriptorResponse\022_\n\030ListNamespaceDescr" +
59746       "iptors\022 .ListNamespaceDescriptorsRequest",
59747       "\032!.ListNamespaceDescriptorsResponse\022t\n\037L" +
59748       "istTableDescriptorsByNamespace\022\'.ListTab" +
59749       "leDescriptorsByNamespaceRequest\032(.ListTa" +
59750       "bleDescriptorsByNamespaceResponse\022b\n\031Lis" +
59751       "tTableNamesByNamespace\022!.ListTableNamesB" +
59752       "yNamespaceRequest\032\".ListTableNamesByName" +
59753       "spaceResponse\022/\n\010SetQuota\022\020.SetQuotaRequ" +
59754       "est\032\021.SetQuotaResponse\022f\n\037getLastMajorCo" +
59755       "mpactionTimestamp\022 .MajorCompactionTimes" +
59756       "tampRequest\032!.MajorCompactionTimestampRe",
59757       "sponse\022x\n(getLastMajorCompactionTimestam" +
59758       "pForRegion\022).MajorCompactionTimestampFor" +
59759       "RegionRequest\032!.MajorCompactionTimestamp" +
59760       "Response\022M\n\022getProcedureResult\022\032.GetProc" +
59761       "edureResultRequest\032\033.GetProcedureResultR" +
59762       "esponse\022V\n\027getSecurityCapabilities\022\034.Sec" +
59763       "urityCapabilitiesRequest\032\035.SecurityCapab" +
59764       "ilitiesResponse\022A\n\016AbortProcedure\022\026.Abor" +
59765       "tProcedureRequest\032\027.AbortProcedureRespon" +
59766       "se\022A\n\016ListProcedures\022\026.ListProceduresReq",
59767       "uest\032\027.ListProceduresResponseBB\n*org.apa" +
59768       "che.hadoop.hbase.protobuf.generatedB\014Mas" +
59769       "terProtosH\001\210\001\001\240\001\001"
59770     };
59771     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
59772       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
59773         public com.google.protobuf.ExtensionRegistry assignDescriptors(
59774             com.google.protobuf.Descriptors.FileDescriptor root) {
59775           descriptor = root;
59776           internal_static_AddColumnRequest_descriptor =
59777             getDescriptor().getMessageTypes().get(0);
59778           internal_static_AddColumnRequest_fieldAccessorTable = new
59779             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59780               internal_static_AddColumnRequest_descriptor,
59781               new java.lang.String[] { "TableName", "ColumnFamilies", "NonceGroup", "Nonce", });
59782           internal_static_AddColumnResponse_descriptor =
59783             getDescriptor().getMessageTypes().get(1);
59784           internal_static_AddColumnResponse_fieldAccessorTable = new
59785             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59786               internal_static_AddColumnResponse_descriptor,
59787               new java.lang.String[] { });
59788           internal_static_DeleteColumnRequest_descriptor =
59789             getDescriptor().getMessageTypes().get(2);
59790           internal_static_DeleteColumnRequest_fieldAccessorTable = new
59791             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59792               internal_static_DeleteColumnRequest_descriptor,
59793               new java.lang.String[] { "TableName", "ColumnName", "NonceGroup", "Nonce", });
59794           internal_static_DeleteColumnResponse_descriptor =
59795             getDescriptor().getMessageTypes().get(3);
59796           internal_static_DeleteColumnResponse_fieldAccessorTable = new
59797             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59798               internal_static_DeleteColumnResponse_descriptor,
59799               new java.lang.String[] { });
59800           internal_static_ModifyColumnRequest_descriptor =
59801             getDescriptor().getMessageTypes().get(4);
59802           internal_static_ModifyColumnRequest_fieldAccessorTable = new
59803             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59804               internal_static_ModifyColumnRequest_descriptor,
59805               new java.lang.String[] { "TableName", "ColumnFamilies", "NonceGroup", "Nonce", });
59806           internal_static_ModifyColumnResponse_descriptor =
59807             getDescriptor().getMessageTypes().get(5);
59808           internal_static_ModifyColumnResponse_fieldAccessorTable = new
59809             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59810               internal_static_ModifyColumnResponse_descriptor,
59811               new java.lang.String[] { });
59812           internal_static_MoveRegionRequest_descriptor =
59813             getDescriptor().getMessageTypes().get(6);
59814           internal_static_MoveRegionRequest_fieldAccessorTable = new
59815             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59816               internal_static_MoveRegionRequest_descriptor,
59817               new java.lang.String[] { "Region", "DestServerName", });
59818           internal_static_MoveRegionResponse_descriptor =
59819             getDescriptor().getMessageTypes().get(7);
59820           internal_static_MoveRegionResponse_fieldAccessorTable = new
59821             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59822               internal_static_MoveRegionResponse_descriptor,
59823               new java.lang.String[] { });
59824           internal_static_DispatchMergingRegionsRequest_descriptor =
59825             getDescriptor().getMessageTypes().get(8);
59826           internal_static_DispatchMergingRegionsRequest_fieldAccessorTable = new
59827             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59828               internal_static_DispatchMergingRegionsRequest_descriptor,
59829               new java.lang.String[] { "RegionA", "RegionB", "Forcible", });
59830           internal_static_DispatchMergingRegionsResponse_descriptor =
59831             getDescriptor().getMessageTypes().get(9);
59832           internal_static_DispatchMergingRegionsResponse_fieldAccessorTable = new
59833             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59834               internal_static_DispatchMergingRegionsResponse_descriptor,
59835               new java.lang.String[] { });
59836           internal_static_AssignRegionRequest_descriptor =
59837             getDescriptor().getMessageTypes().get(10);
59838           internal_static_AssignRegionRequest_fieldAccessorTable = new
59839             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59840               internal_static_AssignRegionRequest_descriptor,
59841               new java.lang.String[] { "Region", });
59842           internal_static_AssignRegionResponse_descriptor =
59843             getDescriptor().getMessageTypes().get(11);
59844           internal_static_AssignRegionResponse_fieldAccessorTable = new
59845             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59846               internal_static_AssignRegionResponse_descriptor,
59847               new java.lang.String[] { });
59848           internal_static_UnassignRegionRequest_descriptor =
59849             getDescriptor().getMessageTypes().get(12);
59850           internal_static_UnassignRegionRequest_fieldAccessorTable = new
59851             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59852               internal_static_UnassignRegionRequest_descriptor,
59853               new java.lang.String[] { "Region", "Force", });
59854           internal_static_UnassignRegionResponse_descriptor =
59855             getDescriptor().getMessageTypes().get(13);
59856           internal_static_UnassignRegionResponse_fieldAccessorTable = new
59857             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59858               internal_static_UnassignRegionResponse_descriptor,
59859               new java.lang.String[] { });
59860           internal_static_OfflineRegionRequest_descriptor =
59861             getDescriptor().getMessageTypes().get(14);
59862           internal_static_OfflineRegionRequest_fieldAccessorTable = new
59863             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59864               internal_static_OfflineRegionRequest_descriptor,
59865               new java.lang.String[] { "Region", });
59866           internal_static_OfflineRegionResponse_descriptor =
59867             getDescriptor().getMessageTypes().get(15);
59868           internal_static_OfflineRegionResponse_fieldAccessorTable = new
59869             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59870               internal_static_OfflineRegionResponse_descriptor,
59871               new java.lang.String[] { });
59872           internal_static_CreateTableRequest_descriptor =
59873             getDescriptor().getMessageTypes().get(16);
59874           internal_static_CreateTableRequest_fieldAccessorTable = new
59875             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59876               internal_static_CreateTableRequest_descriptor,
59877               new java.lang.String[] { "TableSchema", "SplitKeys", "NonceGroup", "Nonce", });
59878           internal_static_CreateTableResponse_descriptor =
59879             getDescriptor().getMessageTypes().get(17);
59880           internal_static_CreateTableResponse_fieldAccessorTable = new
59881             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59882               internal_static_CreateTableResponse_descriptor,
59883               new java.lang.String[] { "ProcId", });
59884           internal_static_DeleteTableRequest_descriptor =
59885             getDescriptor().getMessageTypes().get(18);
59886           internal_static_DeleteTableRequest_fieldAccessorTable = new
59887             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59888               internal_static_DeleteTableRequest_descriptor,
59889               new java.lang.String[] { "TableName", "NonceGroup", "Nonce", });
59890           internal_static_DeleteTableResponse_descriptor =
59891             getDescriptor().getMessageTypes().get(19);
59892           internal_static_DeleteTableResponse_fieldAccessorTable = new
59893             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59894               internal_static_DeleteTableResponse_descriptor,
59895               new java.lang.String[] { "ProcId", });
59896           internal_static_TruncateTableRequest_descriptor =
59897             getDescriptor().getMessageTypes().get(20);
59898           internal_static_TruncateTableRequest_fieldAccessorTable = new
59899             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59900               internal_static_TruncateTableRequest_descriptor,
59901               new java.lang.String[] { "TableName", "PreserveSplits", "NonceGroup", "Nonce", });
59902           internal_static_TruncateTableResponse_descriptor =
59903             getDescriptor().getMessageTypes().get(21);
59904           internal_static_TruncateTableResponse_fieldAccessorTable = new
59905             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59906               internal_static_TruncateTableResponse_descriptor,
59907               new java.lang.String[] { });
59908           internal_static_EnableTableRequest_descriptor =
59909             getDescriptor().getMessageTypes().get(22);
59910           internal_static_EnableTableRequest_fieldAccessorTable = new
59911             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59912               internal_static_EnableTableRequest_descriptor,
59913               new java.lang.String[] { "TableName", "NonceGroup", "Nonce", });
59914           internal_static_EnableTableResponse_descriptor =
59915             getDescriptor().getMessageTypes().get(23);
59916           internal_static_EnableTableResponse_fieldAccessorTable = new
59917             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59918               internal_static_EnableTableResponse_descriptor,
59919               new java.lang.String[] { "ProcId", });
59920           internal_static_DisableTableRequest_descriptor =
59921             getDescriptor().getMessageTypes().get(24);
59922           internal_static_DisableTableRequest_fieldAccessorTable = new
59923             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59924               internal_static_DisableTableRequest_descriptor,
59925               new java.lang.String[] { "TableName", "NonceGroup", "Nonce", });
59926           internal_static_DisableTableResponse_descriptor =
59927             getDescriptor().getMessageTypes().get(25);
59928           internal_static_DisableTableResponse_fieldAccessorTable = new
59929             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59930               internal_static_DisableTableResponse_descriptor,
59931               new java.lang.String[] { "ProcId", });
59932           internal_static_ModifyTableRequest_descriptor =
59933             getDescriptor().getMessageTypes().get(26);
59934           internal_static_ModifyTableRequest_fieldAccessorTable = new
59935             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59936               internal_static_ModifyTableRequest_descriptor,
59937               new java.lang.String[] { "TableName", "TableSchema", "NonceGroup", "Nonce", });
59938           internal_static_ModifyTableResponse_descriptor =
59939             getDescriptor().getMessageTypes().get(27);
59940           internal_static_ModifyTableResponse_fieldAccessorTable = new
59941             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59942               internal_static_ModifyTableResponse_descriptor,
59943               new java.lang.String[] { });
59944           internal_static_CreateNamespaceRequest_descriptor =
59945             getDescriptor().getMessageTypes().get(28);
59946           internal_static_CreateNamespaceRequest_fieldAccessorTable = new
59947             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59948               internal_static_CreateNamespaceRequest_descriptor,
59949               new java.lang.String[] { "NamespaceDescriptor", });
59950           internal_static_CreateNamespaceResponse_descriptor =
59951             getDescriptor().getMessageTypes().get(29);
59952           internal_static_CreateNamespaceResponse_fieldAccessorTable = new
59953             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59954               internal_static_CreateNamespaceResponse_descriptor,
59955               new java.lang.String[] { });
59956           internal_static_DeleteNamespaceRequest_descriptor =
59957             getDescriptor().getMessageTypes().get(30);
59958           internal_static_DeleteNamespaceRequest_fieldAccessorTable = new
59959             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59960               internal_static_DeleteNamespaceRequest_descriptor,
59961               new java.lang.String[] { "NamespaceName", });
59962           internal_static_DeleteNamespaceResponse_descriptor =
59963             getDescriptor().getMessageTypes().get(31);
59964           internal_static_DeleteNamespaceResponse_fieldAccessorTable = new
59965             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59966               internal_static_DeleteNamespaceResponse_descriptor,
59967               new java.lang.String[] { });
59968           internal_static_ModifyNamespaceRequest_descriptor =
59969             getDescriptor().getMessageTypes().get(32);
59970           internal_static_ModifyNamespaceRequest_fieldAccessorTable = new
59971             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59972               internal_static_ModifyNamespaceRequest_descriptor,
59973               new java.lang.String[] { "NamespaceDescriptor", });
59974           internal_static_ModifyNamespaceResponse_descriptor =
59975             getDescriptor().getMessageTypes().get(33);
59976           internal_static_ModifyNamespaceResponse_fieldAccessorTable = new
59977             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59978               internal_static_ModifyNamespaceResponse_descriptor,
59979               new java.lang.String[] { });
59980           internal_static_GetNamespaceDescriptorRequest_descriptor =
59981             getDescriptor().getMessageTypes().get(34);
59982           internal_static_GetNamespaceDescriptorRequest_fieldAccessorTable = new
59983             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59984               internal_static_GetNamespaceDescriptorRequest_descriptor,
59985               new java.lang.String[] { "NamespaceName", });
59986           internal_static_GetNamespaceDescriptorResponse_descriptor =
59987             getDescriptor().getMessageTypes().get(35);
59988           internal_static_GetNamespaceDescriptorResponse_fieldAccessorTable = new
59989             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59990               internal_static_GetNamespaceDescriptorResponse_descriptor,
59991               new java.lang.String[] { "NamespaceDescriptor", });
59992           internal_static_ListNamespaceDescriptorsRequest_descriptor =
59993             getDescriptor().getMessageTypes().get(36);
59994           internal_static_ListNamespaceDescriptorsRequest_fieldAccessorTable = new
59995             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
59996               internal_static_ListNamespaceDescriptorsRequest_descriptor,
59997               new java.lang.String[] { });
59998           internal_static_ListNamespaceDescriptorsResponse_descriptor =
59999             getDescriptor().getMessageTypes().get(37);
60000           internal_static_ListNamespaceDescriptorsResponse_fieldAccessorTable = new
60001             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60002               internal_static_ListNamespaceDescriptorsResponse_descriptor,
60003               new java.lang.String[] { "NamespaceDescriptor", });
60004           internal_static_ListTableDescriptorsByNamespaceRequest_descriptor =
60005             getDescriptor().getMessageTypes().get(38);
60006           internal_static_ListTableDescriptorsByNamespaceRequest_fieldAccessorTable = new
60007             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60008               internal_static_ListTableDescriptorsByNamespaceRequest_descriptor,
60009               new java.lang.String[] { "NamespaceName", });
60010           internal_static_ListTableDescriptorsByNamespaceResponse_descriptor =
60011             getDescriptor().getMessageTypes().get(39);
60012           internal_static_ListTableDescriptorsByNamespaceResponse_fieldAccessorTable = new
60013             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60014               internal_static_ListTableDescriptorsByNamespaceResponse_descriptor,
60015               new java.lang.String[] { "TableSchema", });
60016           internal_static_ListTableNamesByNamespaceRequest_descriptor =
60017             getDescriptor().getMessageTypes().get(40);
60018           internal_static_ListTableNamesByNamespaceRequest_fieldAccessorTable = new
60019             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60020               internal_static_ListTableNamesByNamespaceRequest_descriptor,
60021               new java.lang.String[] { "NamespaceName", });
60022           internal_static_ListTableNamesByNamespaceResponse_descriptor =
60023             getDescriptor().getMessageTypes().get(41);
60024           internal_static_ListTableNamesByNamespaceResponse_fieldAccessorTable = new
60025             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60026               internal_static_ListTableNamesByNamespaceResponse_descriptor,
60027               new java.lang.String[] { "TableName", });
60028           internal_static_ShutdownRequest_descriptor =
60029             getDescriptor().getMessageTypes().get(42);
60030           internal_static_ShutdownRequest_fieldAccessorTable = new
60031             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60032               internal_static_ShutdownRequest_descriptor,
60033               new java.lang.String[] { });
60034           internal_static_ShutdownResponse_descriptor =
60035             getDescriptor().getMessageTypes().get(43);
60036           internal_static_ShutdownResponse_fieldAccessorTable = new
60037             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60038               internal_static_ShutdownResponse_descriptor,
60039               new java.lang.String[] { });
60040           internal_static_StopMasterRequest_descriptor =
60041             getDescriptor().getMessageTypes().get(44);
60042           internal_static_StopMasterRequest_fieldAccessorTable = new
60043             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60044               internal_static_StopMasterRequest_descriptor,
60045               new java.lang.String[] { });
60046           internal_static_StopMasterResponse_descriptor =
60047             getDescriptor().getMessageTypes().get(45);
60048           internal_static_StopMasterResponse_fieldAccessorTable = new
60049             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60050               internal_static_StopMasterResponse_descriptor,
60051               new java.lang.String[] { });
60052           internal_static_BalanceRequest_descriptor =
60053             getDescriptor().getMessageTypes().get(46);
60054           internal_static_BalanceRequest_fieldAccessorTable = new
60055             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60056               internal_static_BalanceRequest_descriptor,
60057               new java.lang.String[] { });
60058           internal_static_BalanceResponse_descriptor =
60059             getDescriptor().getMessageTypes().get(47);
60060           internal_static_BalanceResponse_fieldAccessorTable = new
60061             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60062               internal_static_BalanceResponse_descriptor,
60063               new java.lang.String[] { "BalancerRan", });
60064           internal_static_SetBalancerRunningRequest_descriptor =
60065             getDescriptor().getMessageTypes().get(48);
60066           internal_static_SetBalancerRunningRequest_fieldAccessorTable = new
60067             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60068               internal_static_SetBalancerRunningRequest_descriptor,
60069               new java.lang.String[] { "On", "Synchronous", });
60070           internal_static_SetBalancerRunningResponse_descriptor =
60071             getDescriptor().getMessageTypes().get(49);
60072           internal_static_SetBalancerRunningResponse_fieldAccessorTable = new
60073             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60074               internal_static_SetBalancerRunningResponse_descriptor,
60075               new java.lang.String[] { "PrevBalanceValue", });
60076           internal_static_IsBalancerEnabledRequest_descriptor =
60077             getDescriptor().getMessageTypes().get(50);
60078           internal_static_IsBalancerEnabledRequest_fieldAccessorTable = new
60079             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60080               internal_static_IsBalancerEnabledRequest_descriptor,
60081               new java.lang.String[] { });
60082           internal_static_IsBalancerEnabledResponse_descriptor =
60083             getDescriptor().getMessageTypes().get(51);
60084           internal_static_IsBalancerEnabledResponse_fieldAccessorTable = new
60085             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60086               internal_static_IsBalancerEnabledResponse_descriptor,
60087               new java.lang.String[] { "Enabled", });
60088           internal_static_NormalizeRequest_descriptor =
60089             getDescriptor().getMessageTypes().get(52);
60090           internal_static_NormalizeRequest_fieldAccessorTable = new
60091             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60092               internal_static_NormalizeRequest_descriptor,
60093               new java.lang.String[] { });
60094           internal_static_NormalizeResponse_descriptor =
60095             getDescriptor().getMessageTypes().get(53);
60096           internal_static_NormalizeResponse_fieldAccessorTable = new
60097             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60098               internal_static_NormalizeResponse_descriptor,
60099               new java.lang.String[] { "NormalizerRan", });
60100           internal_static_SetNormalizerRunningRequest_descriptor =
60101             getDescriptor().getMessageTypes().get(54);
60102           internal_static_SetNormalizerRunningRequest_fieldAccessorTable = new
60103             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60104               internal_static_SetNormalizerRunningRequest_descriptor,
60105               new java.lang.String[] { "On", });
60106           internal_static_SetNormalizerRunningResponse_descriptor =
60107             getDescriptor().getMessageTypes().get(55);
60108           internal_static_SetNormalizerRunningResponse_fieldAccessorTable = new
60109             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60110               internal_static_SetNormalizerRunningResponse_descriptor,
60111               new java.lang.String[] { "PrevNormalizerValue", });
60112           internal_static_IsNormalizerEnabledRequest_descriptor =
60113             getDescriptor().getMessageTypes().get(56);
60114           internal_static_IsNormalizerEnabledRequest_fieldAccessorTable = new
60115             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60116               internal_static_IsNormalizerEnabledRequest_descriptor,
60117               new java.lang.String[] { });
60118           internal_static_IsNormalizerEnabledResponse_descriptor =
60119             getDescriptor().getMessageTypes().get(57);
60120           internal_static_IsNormalizerEnabledResponse_fieldAccessorTable = new
60121             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60122               internal_static_IsNormalizerEnabledResponse_descriptor,
60123               new java.lang.String[] { "Enabled", });
60124           internal_static_RunCatalogScanRequest_descriptor =
60125             getDescriptor().getMessageTypes().get(58);
60126           internal_static_RunCatalogScanRequest_fieldAccessorTable = new
60127             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60128               internal_static_RunCatalogScanRequest_descriptor,
60129               new java.lang.String[] { });
60130           internal_static_RunCatalogScanResponse_descriptor =
60131             getDescriptor().getMessageTypes().get(59);
60132           internal_static_RunCatalogScanResponse_fieldAccessorTable = new
60133             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60134               internal_static_RunCatalogScanResponse_descriptor,
60135               new java.lang.String[] { "ScanResult", });
60136           internal_static_EnableCatalogJanitorRequest_descriptor =
60137             getDescriptor().getMessageTypes().get(60);
60138           internal_static_EnableCatalogJanitorRequest_fieldAccessorTable = new
60139             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60140               internal_static_EnableCatalogJanitorRequest_descriptor,
60141               new java.lang.String[] { "Enable", });
60142           internal_static_EnableCatalogJanitorResponse_descriptor =
60143             getDescriptor().getMessageTypes().get(61);
60144           internal_static_EnableCatalogJanitorResponse_fieldAccessorTable = new
60145             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60146               internal_static_EnableCatalogJanitorResponse_descriptor,
60147               new java.lang.String[] { "PrevValue", });
60148           internal_static_IsCatalogJanitorEnabledRequest_descriptor =
60149             getDescriptor().getMessageTypes().get(62);
60150           internal_static_IsCatalogJanitorEnabledRequest_fieldAccessorTable = new
60151             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60152               internal_static_IsCatalogJanitorEnabledRequest_descriptor,
60153               new java.lang.String[] { });
60154           internal_static_IsCatalogJanitorEnabledResponse_descriptor =
60155             getDescriptor().getMessageTypes().get(63);
60156           internal_static_IsCatalogJanitorEnabledResponse_fieldAccessorTable = new
60157             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60158               internal_static_IsCatalogJanitorEnabledResponse_descriptor,
60159               new java.lang.String[] { "Value", });
60160           internal_static_SnapshotRequest_descriptor =
60161             getDescriptor().getMessageTypes().get(64);
60162           internal_static_SnapshotRequest_fieldAccessorTable = new
60163             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60164               internal_static_SnapshotRequest_descriptor,
60165               new java.lang.String[] { "Snapshot", });
60166           internal_static_SnapshotResponse_descriptor =
60167             getDescriptor().getMessageTypes().get(65);
60168           internal_static_SnapshotResponse_fieldAccessorTable = new
60169             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60170               internal_static_SnapshotResponse_descriptor,
60171               new java.lang.String[] { "ExpectedTimeout", });
60172           internal_static_GetCompletedSnapshotsRequest_descriptor =
60173             getDescriptor().getMessageTypes().get(66);
60174           internal_static_GetCompletedSnapshotsRequest_fieldAccessorTable = new
60175             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60176               internal_static_GetCompletedSnapshotsRequest_descriptor,
60177               new java.lang.String[] { });
60178           internal_static_GetCompletedSnapshotsResponse_descriptor =
60179             getDescriptor().getMessageTypes().get(67);
60180           internal_static_GetCompletedSnapshotsResponse_fieldAccessorTable = new
60181             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60182               internal_static_GetCompletedSnapshotsResponse_descriptor,
60183               new java.lang.String[] { "Snapshots", });
60184           internal_static_DeleteSnapshotRequest_descriptor =
60185             getDescriptor().getMessageTypes().get(68);
60186           internal_static_DeleteSnapshotRequest_fieldAccessorTable = new
60187             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60188               internal_static_DeleteSnapshotRequest_descriptor,
60189               new java.lang.String[] { "Snapshot", });
60190           internal_static_DeleteSnapshotResponse_descriptor =
60191             getDescriptor().getMessageTypes().get(69);
60192           internal_static_DeleteSnapshotResponse_fieldAccessorTable = new
60193             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60194               internal_static_DeleteSnapshotResponse_descriptor,
60195               new java.lang.String[] { });
60196           internal_static_RestoreSnapshotRequest_descriptor =
60197             getDescriptor().getMessageTypes().get(70);
60198           internal_static_RestoreSnapshotRequest_fieldAccessorTable = new
60199             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60200               internal_static_RestoreSnapshotRequest_descriptor,
60201               new java.lang.String[] { "Snapshot", });
60202           internal_static_RestoreSnapshotResponse_descriptor =
60203             getDescriptor().getMessageTypes().get(71);
60204           internal_static_RestoreSnapshotResponse_fieldAccessorTable = new
60205             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60206               internal_static_RestoreSnapshotResponse_descriptor,
60207               new java.lang.String[] { });
60208           internal_static_IsSnapshotDoneRequest_descriptor =
60209             getDescriptor().getMessageTypes().get(72);
60210           internal_static_IsSnapshotDoneRequest_fieldAccessorTable = new
60211             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60212               internal_static_IsSnapshotDoneRequest_descriptor,
60213               new java.lang.String[] { "Snapshot", });
60214           internal_static_IsSnapshotDoneResponse_descriptor =
60215             getDescriptor().getMessageTypes().get(73);
60216           internal_static_IsSnapshotDoneResponse_fieldAccessorTable = new
60217             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60218               internal_static_IsSnapshotDoneResponse_descriptor,
60219               new java.lang.String[] { "Done", "Snapshot", });
60220           internal_static_IsRestoreSnapshotDoneRequest_descriptor =
60221             getDescriptor().getMessageTypes().get(74);
60222           internal_static_IsRestoreSnapshotDoneRequest_fieldAccessorTable = new
60223             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60224               internal_static_IsRestoreSnapshotDoneRequest_descriptor,
60225               new java.lang.String[] { "Snapshot", });
60226           internal_static_IsRestoreSnapshotDoneResponse_descriptor =
60227             getDescriptor().getMessageTypes().get(75);
60228           internal_static_IsRestoreSnapshotDoneResponse_fieldAccessorTable = new
60229             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60230               internal_static_IsRestoreSnapshotDoneResponse_descriptor,
60231               new java.lang.String[] { "Done", });
60232           internal_static_GetSchemaAlterStatusRequest_descriptor =
60233             getDescriptor().getMessageTypes().get(76);
60234           internal_static_GetSchemaAlterStatusRequest_fieldAccessorTable = new
60235             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60236               internal_static_GetSchemaAlterStatusRequest_descriptor,
60237               new java.lang.String[] { "TableName", });
60238           internal_static_GetSchemaAlterStatusResponse_descriptor =
60239             getDescriptor().getMessageTypes().get(77);
60240           internal_static_GetSchemaAlterStatusResponse_fieldAccessorTable = new
60241             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60242               internal_static_GetSchemaAlterStatusResponse_descriptor,
60243               new java.lang.String[] { "YetToUpdateRegions", "TotalRegions", });
60244           internal_static_GetTableDescriptorsRequest_descriptor =
60245             getDescriptor().getMessageTypes().get(78);
60246           internal_static_GetTableDescriptorsRequest_fieldAccessorTable = new
60247             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60248               internal_static_GetTableDescriptorsRequest_descriptor,
60249               new java.lang.String[] { "TableNames", "Regex", "IncludeSysTables", "Namespace", });
60250           internal_static_GetTableDescriptorsResponse_descriptor =
60251             getDescriptor().getMessageTypes().get(79);
60252           internal_static_GetTableDescriptorsResponse_fieldAccessorTable = new
60253             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60254               internal_static_GetTableDescriptorsResponse_descriptor,
60255               new java.lang.String[] { "TableSchema", });
60256           internal_static_GetTableNamesRequest_descriptor =
60257             getDescriptor().getMessageTypes().get(80);
60258           internal_static_GetTableNamesRequest_fieldAccessorTable = new
60259             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60260               internal_static_GetTableNamesRequest_descriptor,
60261               new java.lang.String[] { "Regex", "IncludeSysTables", "Namespace", });
60262           internal_static_GetTableNamesResponse_descriptor =
60263             getDescriptor().getMessageTypes().get(81);
60264           internal_static_GetTableNamesResponse_fieldAccessorTable = new
60265             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60266               internal_static_GetTableNamesResponse_descriptor,
60267               new java.lang.String[] { "TableNames", });
60268           internal_static_GetClusterStatusRequest_descriptor =
60269             getDescriptor().getMessageTypes().get(82);
60270           internal_static_GetClusterStatusRequest_fieldAccessorTable = new
60271             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60272               internal_static_GetClusterStatusRequest_descriptor,
60273               new java.lang.String[] { });
60274           internal_static_GetClusterStatusResponse_descriptor =
60275             getDescriptor().getMessageTypes().get(83);
60276           internal_static_GetClusterStatusResponse_fieldAccessorTable = new
60277             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60278               internal_static_GetClusterStatusResponse_descriptor,
60279               new java.lang.String[] { "ClusterStatus", });
60280           internal_static_IsMasterRunningRequest_descriptor =
60281             getDescriptor().getMessageTypes().get(84);
60282           internal_static_IsMasterRunningRequest_fieldAccessorTable = new
60283             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60284               internal_static_IsMasterRunningRequest_descriptor,
60285               new java.lang.String[] { });
60286           internal_static_IsMasterRunningResponse_descriptor =
60287             getDescriptor().getMessageTypes().get(85);
60288           internal_static_IsMasterRunningResponse_fieldAccessorTable = new
60289             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60290               internal_static_IsMasterRunningResponse_descriptor,
60291               new java.lang.String[] { "IsMasterRunning", });
60292           internal_static_ExecProcedureRequest_descriptor =
60293             getDescriptor().getMessageTypes().get(86);
60294           internal_static_ExecProcedureRequest_fieldAccessorTable = new
60295             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60296               internal_static_ExecProcedureRequest_descriptor,
60297               new java.lang.String[] { "Procedure", });
60298           internal_static_ExecProcedureResponse_descriptor =
60299             getDescriptor().getMessageTypes().get(87);
60300           internal_static_ExecProcedureResponse_fieldAccessorTable = new
60301             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60302               internal_static_ExecProcedureResponse_descriptor,
60303               new java.lang.String[] { "ExpectedTimeout", "ReturnData", });
60304           internal_static_IsProcedureDoneRequest_descriptor =
60305             getDescriptor().getMessageTypes().get(88);
60306           internal_static_IsProcedureDoneRequest_fieldAccessorTable = new
60307             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60308               internal_static_IsProcedureDoneRequest_descriptor,
60309               new java.lang.String[] { "Procedure", });
60310           internal_static_IsProcedureDoneResponse_descriptor =
60311             getDescriptor().getMessageTypes().get(89);
60312           internal_static_IsProcedureDoneResponse_fieldAccessorTable = new
60313             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60314               internal_static_IsProcedureDoneResponse_descriptor,
60315               new java.lang.String[] { "Done", "Snapshot", });
60316           internal_static_GetProcedureResultRequest_descriptor =
60317             getDescriptor().getMessageTypes().get(90);
60318           internal_static_GetProcedureResultRequest_fieldAccessorTable = new
60319             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60320               internal_static_GetProcedureResultRequest_descriptor,
60321               new java.lang.String[] { "ProcId", });
60322           internal_static_GetProcedureResultResponse_descriptor =
60323             getDescriptor().getMessageTypes().get(91);
60324           internal_static_GetProcedureResultResponse_fieldAccessorTable = new
60325             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60326               internal_static_GetProcedureResultResponse_descriptor,
60327               new java.lang.String[] { "State", "StartTime", "LastUpdate", "Result", "Exception", });
60328           internal_static_AbortProcedureRequest_descriptor =
60329             getDescriptor().getMessageTypes().get(92);
60330           internal_static_AbortProcedureRequest_fieldAccessorTable = new
60331             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60332               internal_static_AbortProcedureRequest_descriptor,
60333               new java.lang.String[] { "ProcId", "MayInterruptIfRunning", });
60334           internal_static_AbortProcedureResponse_descriptor =
60335             getDescriptor().getMessageTypes().get(93);
60336           internal_static_AbortProcedureResponse_fieldAccessorTable = new
60337             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60338               internal_static_AbortProcedureResponse_descriptor,
60339               new java.lang.String[] { "IsProcedureAborted", });
60340           internal_static_ListProceduresRequest_descriptor =
60341             getDescriptor().getMessageTypes().get(94);
60342           internal_static_ListProceduresRequest_fieldAccessorTable = new
60343             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60344               internal_static_ListProceduresRequest_descriptor,
60345               new java.lang.String[] { });
60346           internal_static_ListProceduresResponse_descriptor =
60347             getDescriptor().getMessageTypes().get(95);
60348           internal_static_ListProceduresResponse_fieldAccessorTable = new
60349             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60350               internal_static_ListProceduresResponse_descriptor,
60351               new java.lang.String[] { "Procedure", });
60352           internal_static_SetQuotaRequest_descriptor =
60353             getDescriptor().getMessageTypes().get(96);
60354           internal_static_SetQuotaRequest_fieldAccessorTable = new
60355             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60356               internal_static_SetQuotaRequest_descriptor,
60357               new java.lang.String[] { "UserName", "UserGroup", "Namespace", "TableName", "RemoveAll", "BypassGlobals", "Throttle", });
60358           internal_static_SetQuotaResponse_descriptor =
60359             getDescriptor().getMessageTypes().get(97);
60360           internal_static_SetQuotaResponse_fieldAccessorTable = new
60361             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60362               internal_static_SetQuotaResponse_descriptor,
60363               new java.lang.String[] { });
60364           internal_static_MajorCompactionTimestampRequest_descriptor =
60365             getDescriptor().getMessageTypes().get(98);
60366           internal_static_MajorCompactionTimestampRequest_fieldAccessorTable = new
60367             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60368               internal_static_MajorCompactionTimestampRequest_descriptor,
60369               new java.lang.String[] { "TableName", });
60370           internal_static_MajorCompactionTimestampForRegionRequest_descriptor =
60371             getDescriptor().getMessageTypes().get(99);
60372           internal_static_MajorCompactionTimestampForRegionRequest_fieldAccessorTable = new
60373             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60374               internal_static_MajorCompactionTimestampForRegionRequest_descriptor,
60375               new java.lang.String[] { "Region", });
60376           internal_static_MajorCompactionTimestampResponse_descriptor =
60377             getDescriptor().getMessageTypes().get(100);
60378           internal_static_MajorCompactionTimestampResponse_fieldAccessorTable = new
60379             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60380               internal_static_MajorCompactionTimestampResponse_descriptor,
60381               new java.lang.String[] { "CompactionTimestamp", });
60382           internal_static_SecurityCapabilitiesRequest_descriptor =
60383             getDescriptor().getMessageTypes().get(101);
60384           internal_static_SecurityCapabilitiesRequest_fieldAccessorTable = new
60385             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60386               internal_static_SecurityCapabilitiesRequest_descriptor,
60387               new java.lang.String[] { });
60388           internal_static_SecurityCapabilitiesResponse_descriptor =
60389             getDescriptor().getMessageTypes().get(102);
60390           internal_static_SecurityCapabilitiesResponse_fieldAccessorTable = new
60391             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
60392               internal_static_SecurityCapabilitiesResponse_descriptor,
60393               new java.lang.String[] { "Capabilities", });
60394           return null;
60395         }
60396       };
60397     com.google.protobuf.Descriptors.FileDescriptor
60398       .internalBuildGeneratedFileFrom(descriptorData,
60399         new com.google.protobuf.Descriptors.FileDescriptor[] {
60400           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
60401           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
60402           org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(),
60403           org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.getDescriptor(),
60404           org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos.getDescriptor(),
60405           org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor(),
60406         }, assigner);
60407   }
60408 
60409   // @@protoc_insertion_point(outer_class_scope)
60410 }
60411