1 // Generated by the protocol buffer compiler.  DO NOT EDIT!
2 // source: Quota.proto
3 
4 package org.apache.hadoop.hbase.protobuf.generated;
5 
6 public final class QuotaProtos {
  // Private constructor: this generated class is a static holder and is never instantiated.
  private QuotaProtos() {}
  /**
   * Registers all protobuf extensions declared in Quota.proto with the given registry.
   * Quota.proto declares no extensions, so this is an intentional no-op.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
11   /**
12    * Protobuf enum {@code QuotaScope}
13    */
14   public enum QuotaScope
15       implements com.google.protobuf.ProtocolMessageEnum {
16     /**
17      * <code>CLUSTER = 1;</code>
18      */
19     CLUSTER(0, 1),
20     /**
21      * <code>MACHINE = 2;</code>
22      */
23     MACHINE(1, 2),
24     ;
25 
26     /**
27      * <code>CLUSTER = 1;</code>
28      */
29     public static final int CLUSTER_VALUE = 1;
30     /**
31      * <code>MACHINE = 2;</code>
32      */
33     public static final int MACHINE_VALUE = 2;
34 
35 
getNumber()36     public final int getNumber() { return value; }
37 
valueOf(int value)38     public static QuotaScope valueOf(int value) {
39       switch (value) {
40         case 1: return CLUSTER;
41         case 2: return MACHINE;
42         default: return null;
43       }
44     }
45 
46     public static com.google.protobuf.Internal.EnumLiteMap<QuotaScope>
internalGetValueMap()47         internalGetValueMap() {
48       return internalValueMap;
49     }
50     private static com.google.protobuf.Internal.EnumLiteMap<QuotaScope>
51         internalValueMap =
52           new com.google.protobuf.Internal.EnumLiteMap<QuotaScope>() {
53             public QuotaScope findValueByNumber(int number) {
54               return QuotaScope.valueOf(number);
55             }
56           };
57 
58     public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()59         getValueDescriptor() {
60       return getDescriptor().getValues().get(index);
61     }
62     public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()63         getDescriptorForType() {
64       return getDescriptor();
65     }
66     public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()67         getDescriptor() {
68       return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(0);
69     }
70 
71     private static final QuotaScope[] VALUES = values();
72 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)73     public static QuotaScope valueOf(
74         com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
75       if (desc.getType() != getDescriptor()) {
76         throw new java.lang.IllegalArgumentException(
77           "EnumValueDescriptor is not for this type.");
78       }
79       return VALUES[desc.getIndex()];
80     }
81 
82     private final int index;
83     private final int value;
84 
QuotaScope(int index, int value)85     private QuotaScope(int index, int value) {
86       this.index = index;
87       this.value = value;
88     }
89 
90     // @@protoc_insertion_point(enum_scope:QuotaScope)
91   }
92 
93   /**
94    * Protobuf enum {@code ThrottleType}
95    */
96   public enum ThrottleType
97       implements com.google.protobuf.ProtocolMessageEnum {
98     /**
99      * <code>REQUEST_NUMBER = 1;</code>
100      */
101     REQUEST_NUMBER(0, 1),
102     /**
103      * <code>REQUEST_SIZE = 2;</code>
104      */
105     REQUEST_SIZE(1, 2),
106     /**
107      * <code>WRITE_NUMBER = 3;</code>
108      */
109     WRITE_NUMBER(2, 3),
110     /**
111      * <code>WRITE_SIZE = 4;</code>
112      */
113     WRITE_SIZE(3, 4),
114     /**
115      * <code>READ_NUMBER = 5;</code>
116      */
117     READ_NUMBER(4, 5),
118     /**
119      * <code>READ_SIZE = 6;</code>
120      */
121     READ_SIZE(5, 6),
122     ;
123 
124     /**
125      * <code>REQUEST_NUMBER = 1;</code>
126      */
127     public static final int REQUEST_NUMBER_VALUE = 1;
128     /**
129      * <code>REQUEST_SIZE = 2;</code>
130      */
131     public static final int REQUEST_SIZE_VALUE = 2;
132     /**
133      * <code>WRITE_NUMBER = 3;</code>
134      */
135     public static final int WRITE_NUMBER_VALUE = 3;
136     /**
137      * <code>WRITE_SIZE = 4;</code>
138      */
139     public static final int WRITE_SIZE_VALUE = 4;
140     /**
141      * <code>READ_NUMBER = 5;</code>
142      */
143     public static final int READ_NUMBER_VALUE = 5;
144     /**
145      * <code>READ_SIZE = 6;</code>
146      */
147     public static final int READ_SIZE_VALUE = 6;
148 
149 
getNumber()150     public final int getNumber() { return value; }
151 
valueOf(int value)152     public static ThrottleType valueOf(int value) {
153       switch (value) {
154         case 1: return REQUEST_NUMBER;
155         case 2: return REQUEST_SIZE;
156         case 3: return WRITE_NUMBER;
157         case 4: return WRITE_SIZE;
158         case 5: return READ_NUMBER;
159         case 6: return READ_SIZE;
160         default: return null;
161       }
162     }
163 
164     public static com.google.protobuf.Internal.EnumLiteMap<ThrottleType>
internalGetValueMap()165         internalGetValueMap() {
166       return internalValueMap;
167     }
168     private static com.google.protobuf.Internal.EnumLiteMap<ThrottleType>
169         internalValueMap =
170           new com.google.protobuf.Internal.EnumLiteMap<ThrottleType>() {
171             public ThrottleType findValueByNumber(int number) {
172               return ThrottleType.valueOf(number);
173             }
174           };
175 
176     public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()177         getValueDescriptor() {
178       return getDescriptor().getValues().get(index);
179     }
180     public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()181         getDescriptorForType() {
182       return getDescriptor();
183     }
184     public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()185         getDescriptor() {
186       return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(1);
187     }
188 
189     private static final ThrottleType[] VALUES = values();
190 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)191     public static ThrottleType valueOf(
192         com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
193       if (desc.getType() != getDescriptor()) {
194         throw new java.lang.IllegalArgumentException(
195           "EnumValueDescriptor is not for this type.");
196       }
197       return VALUES[desc.getIndex()];
198     }
199 
200     private final int index;
201     private final int value;
202 
ThrottleType(int index, int value)203     private ThrottleType(int index, int value) {
204       this.index = index;
205       this.value = value;
206     }
207 
208     // @@protoc_insertion_point(enum_scope:ThrottleType)
209   }
210 
211   /**
212    * Protobuf enum {@code QuotaType}
213    */
214   public enum QuotaType
215       implements com.google.protobuf.ProtocolMessageEnum {
216     /**
217      * <code>THROTTLE = 1;</code>
218      */
219     THROTTLE(0, 1),
220     ;
221 
222     /**
223      * <code>THROTTLE = 1;</code>
224      */
225     public static final int THROTTLE_VALUE = 1;
226 
227 
getNumber()228     public final int getNumber() { return value; }
229 
valueOf(int value)230     public static QuotaType valueOf(int value) {
231       switch (value) {
232         case 1: return THROTTLE;
233         default: return null;
234       }
235     }
236 
237     public static com.google.protobuf.Internal.EnumLiteMap<QuotaType>
internalGetValueMap()238         internalGetValueMap() {
239       return internalValueMap;
240     }
241     private static com.google.protobuf.Internal.EnumLiteMap<QuotaType>
242         internalValueMap =
243           new com.google.protobuf.Internal.EnumLiteMap<QuotaType>() {
244             public QuotaType findValueByNumber(int number) {
245               return QuotaType.valueOf(number);
246             }
247           };
248 
249     public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()250         getValueDescriptor() {
251       return getDescriptor().getValues().get(index);
252     }
253     public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()254         getDescriptorForType() {
255       return getDescriptor();
256     }
257     public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()258         getDescriptor() {
259       return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.getDescriptor().getEnumTypes().get(2);
260     }
261 
262     private static final QuotaType[] VALUES = values();
263 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)264     public static QuotaType valueOf(
265         com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
266       if (desc.getType() != getDescriptor()) {
267         throw new java.lang.IllegalArgumentException(
268           "EnumValueDescriptor is not for this type.");
269       }
270       return VALUES[desc.getIndex()];
271     }
272 
273     private final int index;
274     private final int value;
275 
QuotaType(int index, int value)276     private QuotaType(int index, int value) {
277       this.index = index;
278       this.value = value;
279     }
280 
281     // @@protoc_insertion_point(enum_scope:QuotaType)
282   }
283 
  /**
   * Read-only accessor interface for the {@code TimedQuota} message,
   * implemented by both the immutable message and its Builder.
   * For each field there is a {@code hasX()} presence check and a {@code getX()} accessor.
   */
  public interface TimedQuotaOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .TimeUnit time_unit = 1;
    /**
     * <code>required .TimeUnit time_unit = 1;</code>
     */
    boolean hasTimeUnit();
    /**
     * <code>required .TimeUnit time_unit = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit();

    // optional uint64 soft_limit = 2;
    /**
     * <code>optional uint64 soft_limit = 2;</code>
     */
    boolean hasSoftLimit();
    /**
     * <code>optional uint64 soft_limit = 2;</code>
     */
    long getSoftLimit();

    // optional float share = 3;
    /**
     * <code>optional float share = 3;</code>
     */
    boolean hasShare();
    /**
     * <code>optional float share = 3;</code>
     */
    float getShare();

    // optional .QuotaScope scope = 4 [default = MACHINE];
    /**
     * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
     */
    boolean hasScope();
    /**
     * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope getScope();
  }
327   /**
328    * Protobuf type {@code TimedQuota}
329    */
330   public static final class TimedQuota extends
331       com.google.protobuf.GeneratedMessage
332       implements TimedQuotaOrBuilder {
333     // Use TimedQuota.newBuilder() to construct.
    // Use TimedQuota.newBuilder() to construct.
    // Builder-based constructor: copies the builder's unknown fields into the message.
    private TimedQuota(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used for the singleton default instance; unknown fields are empty.
    private TimedQuota(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
339 
    // Singleton default instance; assigned in the class's static initializer
    // (not visible in this chunk).
    private static final TimedQuota defaultInstance;
    public static TimedQuota getDefaultInstance() {
      return defaultInstance;
    }

    public TimedQuota getDefaultInstanceForType() {
      return defaultInstance;
    }
348 
    // Fields with unrecognized tags are preserved here so they round-trip on re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-parsing constructor: reads tag/value pairs from {@code input} until
     * tag 0 (end of message). Tags are {@code (field_number << 3) | wire_type},
     * so 8/16/32 are varint fields 1/2/4 and 29 is fixed32 field 3.
     * Note: placing {@code default:} before the later {@code case} labels is
     * unusual but legal Java — switch dispatch is by value, not label order.
     */
    private TimedQuota(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // required .TimeUnit time_unit = 1; unrecognized enum numbers are
              // kept as unknown varint fields rather than dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                timeUnit_ = value;
              }
              break;
            }
            case 16: {
              // optional uint64 soft_limit = 2;
              bitField0_ |= 0x00000002;
              softLimit_ = input.readUInt64();
              break;
            }
            case 29: {
              // optional float share = 3;
              bitField0_ |= 0x00000004;
              share_ = input.readFloat();
              break;
            }
            case 32: {
              // optional .QuotaScope scope = 4 [default = MACHINE];
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope value = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(4, rawValue);
              } else {
                bitField0_ |= 0x00000008;
                scope_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize unknown fields, even on error, so the partially
        // parsed message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Message descriptor for TimedQuota, resolved from the outer file descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_descriptor;
    }

    // Reflection support: binds descriptor fields to the generated accessor methods.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder.class);
    }
433 
    // Parser used by all parseFrom(...) overloads below.
    // NOTE(review): this public static field is not final, so it is technically
    // reassignable by callers; later protoc outputs declare it final. Since this
    // file is generated ("DO NOT EDIT"), confirm against the generator before changing.
    public static com.google.protobuf.Parser<TimedQuota> PARSER =
        new com.google.protobuf.AbstractParser<TimedQuota>() {
      public TimedQuota parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TimedQuota(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TimedQuota> getParserForType() {
      return PARSER;
    }
448 
    // Presence bitmask: bit N set means field N+1 was explicitly set
    // (0x1=time_unit, 0x2=soft_limit, 0x4=share, 0x8=scope).
    private int bitField0_;
    // required .TimeUnit time_unit = 1;
    public static final int TIME_UNIT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit timeUnit_;
    /**
     * <code>required .TimeUnit time_unit = 1;</code>
     */
    public boolean hasTimeUnit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .TimeUnit time_unit = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() {
      return timeUnit_;
    }

    // optional uint64 soft_limit = 2;
    public static final int SOFT_LIMIT_FIELD_NUMBER = 2;
    private long softLimit_;
    /**
     * <code>optional uint64 soft_limit = 2;</code>
     */
    public boolean hasSoftLimit() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional uint64 soft_limit = 2;</code>
     */
    public long getSoftLimit() {
      return softLimit_;
    }

    // optional float share = 3;
    public static final int SHARE_FIELD_NUMBER = 3;
    private float share_;
    /**
     * <code>optional float share = 3;</code>
     */
    public boolean hasShare() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional float share = 3;</code>
     */
    public float getShare() {
      return share_;
    }

    // optional .QuotaScope scope = 4 [default = MACHINE];
    public static final int SCOPE_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope scope_;
    /**
     * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
     */
    public boolean hasScope() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope getScope() {
      return scope_;
    }
513 
    // Resets every field to its proto-declared default
    // (scope defaults to MACHINE per the [default = MACHINE] option).
    private void initFields() {
      timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
      softLimit_ = 0L;
      share_ = 0F;
      scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // A TimedQuota is initialized iff its single required field, time_unit, is set.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasTimeUnit()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
532 
    // Serializes only the fields whose presence bit is set, in field-number order,
    // then appends any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // also memoizes the size before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, timeUnit_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, softLimit_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeFloat(3, share_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeEnum(4, scope_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }
550 
    // Memoized serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of each present field plus the unknown-field set;
    // must mirror the field list in writeTo exactly.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, timeUnit_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, softLimit_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeFloatSize(3, share_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(4, scope_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
577 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
584 
    // Value equality: same field presence, equal values for present fields,
    // and equal unknown-field sets. Floats are compared via floatToIntBits so
    // NaN == NaN and -0.0f != 0.0f, matching hashCode below.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota) obj;

      boolean result = true;
      result = result && (hasTimeUnit() == other.hasTimeUnit());
      if (hasTimeUnit()) {
        result = result &&
            (getTimeUnit() == other.getTimeUnit());
      }
      result = result && (hasSoftLimit() == other.hasSoftLimit());
      if (hasSoftLimit()) {
        result = result && (getSoftLimit()
            == other.getSoftLimit());
      }
      result = result && (hasShare() == other.hasShare());
      if (hasShare()) {
        result = result && (Float.floatToIntBits(getShare())    == Float.floatToIntBits(other.getShare()));
      }
      result = result && (hasScope() == other.hasScope());
      if (hasScope()) {
        result = result &&
            (getScope() == other.getScope());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
619 
    // Memoized hash; 0 is the "not yet computed" sentinel (recomputed if the
    // true hash happens to be 0). Mixes only present fields, keyed by field number,
    // consistent with equals above.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTimeUnit()) {
        hash = (37 * hash) + TIME_UNIT_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getTimeUnit());
      }
      if (hasSoftLimit()) {
        hash = (37 * hash) + SOFT_LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getSoftLimit());
      }
      if (hasShare()) {
        hash = (37 * hash) + SHARE_FIELD_NUMBER;
        hash = (53 * hash) + Float.floatToIntBits(
            getShare());
      }
      if (hasScope()) {
        hash = (37 * hash) + SCOPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getScope());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
649 
    // Static parse entry points; all delegate to PARSER. The delimited variants
    // read a length-prefixed message, the others consume the whole input.
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
702 
    // Builder factory methods: newBuilder() starts empty, newBuilder(prototype)
    // pre-populates from an existing message, toBuilder() copies this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a builder attached to a parent for nested-builder change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
716     /**
717      * Protobuf type {@code TimedQuota}
718      */
719     public static final class Builder extends
720         com.google.protobuf.GeneratedMessage.Builder<Builder>
721        implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder {
      // Same descriptor/accessor-table plumbing as the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder.class);
      }
733 
734       // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder()
      // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // TimedQuota has no message-typed fields, so there are no nested
      // field builders to pre-create; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
751 
      // Resets every field to its proto default and clears its presence bit.
      public Builder clear() {
        super.clear();
        timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
        bitField0_ = (bitField0_ & ~0x00000001);
        softLimit_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        share_ = 0F;
        bitField0_ = (bitField0_ & ~0x00000004);
        scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
764 
      // Deep copy via a fresh builder merged from this builder's partial message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
768 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_TimedQuota_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      }
777 
      // Builds and verifies required fields are set; throws
      // UninitializedMessageException (unchecked) if time_unit is missing.
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota build() {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
785 
      // Builds without the required-field check: copies every field value
      // unconditionally and carries over only the presence bits that are set.
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.timeUnit_ = timeUnit_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.softLimit_ = softLimit_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.share_ = share_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.scope_ = scope_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
810 
      // Dispatches to the typed overload when the argument is a TimedQuota;
      // otherwise falls back to the reflective merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields present in `other` overwrite this
      // builder's values; unknown fields are merged as well.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) return this;
        if (other.hasTimeUnit()) {
          setTimeUnit(other.getTimeUnit());
        }
        if (other.hasSoftLimit()) {
          setSoftLimit(other.getSoftLimit());
        }
        if (other.hasShare()) {
          setShare(other.getShare());
        }
        if (other.hasScope()) {
          setScope(other.getScope());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
837 
      // time_unit is the only required field of TimedQuota, so presence of
      // that single field decides initialization.
      public final boolean isInitialized() {
        if (!hasTimeUnit()) {

          return false;
        }
        return true;
      }
845 
      // Parses a TimedQuota from the stream and merges it into this builder.
      // On InvalidProtocolBufferException the partially-parsed message (if any)
      // is still merged in the finally block before the exception propagates.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap: one bit per field in declaration order
      // (0x1 time_unit, 0x2 soft_limit, 0x4 share, 0x8 scope).
      private int bitField0_;

      // required .TimeUnit time_unit = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
      /**
       * <code>required .TimeUnit time_unit = 1;</code>
       */
      public boolean hasTimeUnit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .TimeUnit time_unit = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit getTimeUnit() {
        return timeUnit_;
      }
      /**
       * <code>required .TimeUnit time_unit = 1;</code>
       */
      public Builder setTimeUnit(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        timeUnit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .TimeUnit time_unit = 1;</code>
       */
      public Builder clearTimeUnit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        timeUnit_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeUnit.NANOSECONDS;
        onChanged();
        return this;
      }

      // optional uint64 soft_limit = 2;
      private long softLimit_ ;
      /**
       * <code>optional uint64 soft_limit = 2;</code>
       */
      public boolean hasSoftLimit() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional uint64 soft_limit = 2;</code>
       */
      public long getSoftLimit() {
        return softLimit_;
      }
      /**
       * <code>optional uint64 soft_limit = 2;</code>
       */
      public Builder setSoftLimit(long value) {
        bitField0_ |= 0x00000002;
        softLimit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 soft_limit = 2;</code>
       */
      public Builder clearSoftLimit() {
        bitField0_ = (bitField0_ & ~0x00000002);
        softLimit_ = 0L;
        onChanged();
        return this;
      }

      // optional float share = 3;
      private float share_ ;
      /**
       * <code>optional float share = 3;</code>
       */
      public boolean hasShare() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional float share = 3;</code>
       */
      public float getShare() {
        return share_;
      }
      /**
       * <code>optional float share = 3;</code>
       */
      public Builder setShare(float value) {
        bitField0_ |= 0x00000004;
        share_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional float share = 3;</code>
       */
      public Builder clearShare() {
        bitField0_ = (bitField0_ & ~0x00000004);
        share_ = 0F;
        onChanged();
        return this;
      }

      // optional .QuotaScope scope = 4 [default = MACHINE];
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
      /**
       * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
       */
      public boolean hasScope() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope getScope() {
        return scope_;
      }
      /**
       * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
       */
      public Builder setScope(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        scope_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .QuotaScope scope = 4 [default = MACHINE];</code>
       */
      public Builder clearScope() {
        bitField0_ = (bitField0_ & ~0x00000008);
        scope_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaScope.MACHINE;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:TimedQuota)
1002 
1003       // @@protoc_insertion_point(builder_scope:TimedQuota)
1004     }
1005 
    // Eagerly creates the shared immutable default instance for TimedQuota.
    static {
      defaultInstance = new TimedQuota(true);
      defaultInstance.initFields();
    }
1010 
1011     // @@protoc_insertion_point(class_scope:TimedQuota)
1012   }
1013 
  /**
   * Read-accessor contract shared by {@code Throttle} and its builder:
   * presence checks, getters, and or-builder views for the six optional
   * {@code TimedQuota} fields (request/write/read, by count and by size).
   */
  public interface ThrottleOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .TimedQuota req_num = 1;
    /**
     * <code>optional .TimedQuota req_num = 1;</code>
     */
    boolean hasReqNum();
    /**
     * <code>optional .TimedQuota req_num = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqNum();
    /**
     * <code>optional .TimedQuota req_num = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder();

    // optional .TimedQuota req_size = 2;
    /**
     * <code>optional .TimedQuota req_size = 2;</code>
     */
    boolean hasReqSize();
    /**
     * <code>optional .TimedQuota req_size = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqSize();
    /**
     * <code>optional .TimedQuota req_size = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder();

    // optional .TimedQuota write_num = 3;
    /**
     * <code>optional .TimedQuota write_num = 3;</code>
     */
    boolean hasWriteNum();
    /**
     * <code>optional .TimedQuota write_num = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteNum();
    /**
     * <code>optional .TimedQuota write_num = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder();

    // optional .TimedQuota write_size = 4;
    /**
     * <code>optional .TimedQuota write_size = 4;</code>
     */
    boolean hasWriteSize();
    /**
     * <code>optional .TimedQuota write_size = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteSize();
    /**
     * <code>optional .TimedQuota write_size = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder();

    // optional .TimedQuota read_num = 5;
    /**
     * <code>optional .TimedQuota read_num = 5;</code>
     */
    boolean hasReadNum();
    /**
     * <code>optional .TimedQuota read_num = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadNum();
    /**
     * <code>optional .TimedQuota read_num = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder();

    // optional .TimedQuota read_size = 6;
    /**
     * <code>optional .TimedQuota read_size = 6;</code>
     */
    boolean hasReadSize();
    /**
     * <code>optional .TimedQuota read_size = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadSize();
    /**
     * <code>optional .TimedQuota read_size = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder();
  }
1101   /**
1102    * Protobuf type {@code Throttle}
1103    */
1104   public static final class Throttle extends
1105       com.google.protobuf.GeneratedMessage
1106       implements ThrottleOrBuilder {
    // Use Throttle.newBuilder() to construct.
    private Throttle(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Singleton constructor used only for the default instance; field
    // defaults are applied later via initFields() in the static initializer.
    private Throttle(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Throttle defaultInstance;
    public static Throttle getDefaultInstance() {
      return defaultInstance;
    }

    public Throttle getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Reads tags until EOF (tag 0); each of
    // fields 1..6 is a length-delimited TimedQuota (wire tags 10..50). If a
    // field appears more than once, the occurrences are merged via a
    // sub-builder, per protobuf merge semantics. Unrecognized tags are
    // preserved in unknownFields. On malformed input the partially-parsed
    // message is attached to the thrown InvalidProtocolBufferException.
    private Throttle(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = reqNum_.toBuilder();
              }
              reqNum_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(reqNum_);
                reqNum_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = reqSize_.toBuilder();
              }
              reqSize_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(reqSize_);
                reqSize_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = writeNum_.toBuilder();
              }
              writeNum_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(writeNum_);
                writeNum_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = writeSize_.toBuilder();
              }
              writeSize_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(writeSize_);
                writeSize_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 42: {
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000010) == 0x00000010)) {
                subBuilder = readNum_.toBuilder();
              }
              readNum_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(readNum_);
                readNum_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000010;
              break;
            }
            case 50: {
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000020) == 0x00000020)) {
                subBuilder = readSize_.toBuilder();
              }
              readSize_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(readSize_);
                readSize_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000020;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder.class);
    }

    // Shared parser delegating to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<Throttle> PARSER =
        new com.google.protobuf.AbstractParser<Throttle>() {
      public Throttle parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Throttle(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Throttle> getParserForType() {
      return PARSER;
    }
1268 
    // Presence bitmap: one bit per field in declaration order
    // (0x1 req_num .. 0x20 read_size).
    private int bitField0_;
    // optional .TimedQuota req_num = 1;
    public static final int REQ_NUM_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqNum_;
    /**
     * <code>optional .TimedQuota req_num = 1;</code>
     */
    public boolean hasReqNum() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .TimedQuota req_num = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqNum() {
      return reqNum_;
    }
    /**
     * <code>optional .TimedQuota req_num = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() {
      return reqNum_;
    }

    // optional .TimedQuota req_size = 2;
    public static final int REQ_SIZE_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqSize_;
    /**
     * <code>optional .TimedQuota req_size = 2;</code>
     */
    public boolean hasReqSize() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .TimedQuota req_size = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqSize() {
      return reqSize_;
    }
    /**
     * <code>optional .TimedQuota req_size = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() {
      return reqSize_;
    }

    // optional .TimedQuota write_num = 3;
    public static final int WRITE_NUM_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeNum_;
    /**
     * <code>optional .TimedQuota write_num = 3;</code>
     */
    public boolean hasWriteNum() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .TimedQuota write_num = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() {
      return writeNum_;
    }
    /**
     * <code>optional .TimedQuota write_num = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() {
      return writeNum_;
    }

    // optional .TimedQuota write_size = 4;
    public static final int WRITE_SIZE_FIELD_NUMBER = 4;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeSize_;
    /**
     * <code>optional .TimedQuota write_size = 4;</code>
     */
    public boolean hasWriteSize() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .TimedQuota write_size = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() {
      return writeSize_;
    }
    /**
     * <code>optional .TimedQuota write_size = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() {
      return writeSize_;
    }

    // optional .TimedQuota read_num = 5;
    public static final int READ_NUM_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota readNum_;
    /**
     * <code>optional .TimedQuota read_num = 5;</code>
     */
    public boolean hasReadNum() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional .TimedQuota read_num = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadNum() {
      return readNum_;
    }
    /**
     * <code>optional .TimedQuota read_num = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder() {
      return readNum_;
    }

    // optional .TimedQuota read_size = 6;
    public static final int READ_SIZE_FIELD_NUMBER = 6;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota readSize_;
    /**
     * <code>optional .TimedQuota read_size = 6;</code>
     */
    public boolean hasReadSize() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional .TimedQuota read_size = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadSize() {
      return readSize_;
    }
    /**
     * <code>optional .TimedQuota read_size = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder() {
      return readSize_;
    }
1401 
    // Points every message field at the shared TimedQuota default instance
    // so getters never return null even when the field is absent.
    private void initFields() {
      reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      readNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      readSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
    }
    // Memoized initialization check: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    // All six fields are optional, but each present TimedQuota must itself be
    // initialized (TimedQuota has a required time_unit), hence the recursion.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (hasReqNum()) {
        if (!getReqNum().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasReqSize()) {
        if (!getReqSize().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasWriteNum()) {
        if (!getWriteNum().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasWriteSize()) {
        if (!getWriteSize().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasReadNum()) {
        if (!getReadNum().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasReadSize()) {
        if (!getReadSize().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
1454 
    // Serializes only the fields whose presence bit is set, in field-number
    // order, then appends any preserved unknown fields. getSerializedSize()
    // is called first for its side effect of populating memoized sizes used
    // by the message writers.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, reqNum_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, reqSize_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, writeNum_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, writeSize_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeMessage(5, readNum_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeMessage(6, readSize_);
      }
      getUnknownFields().writeTo(output);
    }
1478 
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of each present field plus unknown fields, and
    // memoizes the result (safe because the message is immutable).
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, reqNum_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, reqSize_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, writeNum_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, writeSize_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, readNum_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, readSize_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1513 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1520 
    // Structural equality: both messages must agree on each field's presence
    // and, when present, on its value; unknown fields must match too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle) obj;

      boolean result = true;
      result = result && (hasReqNum() == other.hasReqNum());
      if (hasReqNum()) {
        result = result && getReqNum()
            .equals(other.getReqNum());
      }
      result = result && (hasReqSize() == other.hasReqSize());
      if (hasReqSize()) {
        result = result && getReqSize()
            .equals(other.getReqSize());
      }
      result = result && (hasWriteNum() == other.hasWriteNum());
      if (hasWriteNum()) {
        result = result && getWriteNum()
            .equals(other.getWriteNum());
      }
      result = result && (hasWriteSize() == other.hasWriteSize());
      if (hasWriteSize()) {
        result = result && getWriteSize()
            .equals(other.getWriteSize());
      }
      result = result && (hasReadNum() == other.hasReadNum());
      if (hasReadNum()) {
        result = result && getReadNum()
            .equals(other.getReadNum());
      }
      result = result && (hasReadSize() == other.hasReadSize());
      if (hasReadSize()) {
        result = result && getReadSize()
            .equals(other.getReadSize());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1566 
    // Cached hash code; 0 means "not yet computed". (A legitimately-zero
    // hash is simply recomputed on each call, which is harmless.)
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Mix the descriptor, then the field number and value of every
      // present field — mirroring the fields compared by equals().
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReqNum()) {
        hash = (37 * hash) + REQ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + getReqNum().hashCode();
      }
      if (hasReqSize()) {
        hash = (37 * hash) + REQ_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getReqSize().hashCode();
      }
      if (hasWriteNum()) {
        hash = (37 * hash) + WRITE_NUM_FIELD_NUMBER;
        hash = (53 * hash) + getWriteNum().hashCode();
      }
      if (hasWriteSize()) {
        hash = (37 * hash) + WRITE_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getWriteSize().hashCode();
      }
      if (hasReadNum()) {
        hash = (37 * hash) + READ_NUM_FIELD_NUMBER;
        hash = (53 * hash) + getReadNum().hashCode();
      }
      if (hasReadSize()) {
        hash = (37 * hash) + READ_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getReadSize().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1603 
parseFrom( com.google.protobuf.ByteString data)1604     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
1605         com.google.protobuf.ByteString data)
1606         throws com.google.protobuf.InvalidProtocolBufferException {
1607       return PARSER.parseFrom(data);
1608     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1609     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
1610         com.google.protobuf.ByteString data,
1611         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1612         throws com.google.protobuf.InvalidProtocolBufferException {
1613       return PARSER.parseFrom(data, extensionRegistry);
1614     }
parseFrom(byte[] data)1615     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(byte[] data)
1616         throws com.google.protobuf.InvalidProtocolBufferException {
1617       return PARSER.parseFrom(data);
1618     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1619     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
1620         byte[] data,
1621         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1622         throws com.google.protobuf.InvalidProtocolBufferException {
1623       return PARSER.parseFrom(data, extensionRegistry);
1624     }
parseFrom(java.io.InputStream input)1625     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(java.io.InputStream input)
1626         throws java.io.IOException {
1627       return PARSER.parseFrom(input);
1628     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1629     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
1630         java.io.InputStream input,
1631         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1632         throws java.io.IOException {
1633       return PARSER.parseFrom(input, extensionRegistry);
1634     }
parseDelimitedFrom(java.io.InputStream input)1635     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(java.io.InputStream input)
1636         throws java.io.IOException {
1637       return PARSER.parseDelimitedFrom(input);
1638     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1639     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseDelimitedFrom(
1640         java.io.InputStream input,
1641         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1642         throws java.io.IOException {
1643       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1644     }
parseFrom( com.google.protobuf.CodedInputStream input)1645     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
1646         com.google.protobuf.CodedInputStream input)
1647         throws java.io.IOException {
1648       return PARSER.parseFrom(input);
1649     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1650     public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parseFrom(
1651         com.google.protobuf.CodedInputStream input,
1652         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1653         throws java.io.IOException {
1654       return PARSER.parseFrom(input, extensionRegistry);
1655     }
1656 
newBuilder()1657     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()1658     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle prototype)1659     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle prototype) {
1660       return newBuilder().mergeFrom(prototype);
1661     }
toBuilder()1662     public Builder toBuilder() { return newBuilder(this); }
1663 
    // Called by the protobuf runtime to create a builder whose change
    // notifications propagate to the given parent builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1670     /**
1671      * Protobuf type {@code Throttle}
1672      */
1673     public static final class Builder extends
1674         com.google.protobuf.GeneratedMessage.Builder<Builder>
1675        implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder {
      // Descriptor for the Throttle message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_descriptor;
      }

      // Binds descriptor fields to the generated accessor methods so the
      // reflective protobuf APIs can read/write this builder.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder.class);
      }
1687 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parented constructor: change notifications flow up to `parent`.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates all nested field builders when the runtime flag
      // alwaysUseFieldBuilders is set (used by the protobuf test harness);
      // otherwise builders are created lazily on first use.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getReqNumFieldBuilder();
          getReqSizeFieldBuilder();
          getWriteNumFieldBuilder();
          getWriteSizeFieldBuilder();
          getReadNumFieldBuilder();
          getReadSizeFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
1711 
clear()1712       public Builder clear() {
1713         super.clear();
1714         if (reqNumBuilder_ == null) {
1715           reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
1716         } else {
1717           reqNumBuilder_.clear();
1718         }
1719         bitField0_ = (bitField0_ & ~0x00000001);
1720         if (reqSizeBuilder_ == null) {
1721           reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
1722         } else {
1723           reqSizeBuilder_.clear();
1724         }
1725         bitField0_ = (bitField0_ & ~0x00000002);
1726         if (writeNumBuilder_ == null) {
1727           writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
1728         } else {
1729           writeNumBuilder_.clear();
1730         }
1731         bitField0_ = (bitField0_ & ~0x00000004);
1732         if (writeSizeBuilder_ == null) {
1733           writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
1734         } else {
1735           writeSizeBuilder_.clear();
1736         }
1737         bitField0_ = (bitField0_ & ~0x00000008);
1738         if (readNumBuilder_ == null) {
1739           readNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
1740         } else {
1741           readNumBuilder_.clear();
1742         }
1743         bitField0_ = (bitField0_ & ~0x00000010);
1744         if (readSizeBuilder_ == null) {
1745           readSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
1746         } else {
1747           readSizeBuilder_.clear();
1748         }
1749         bitField0_ = (bitField0_ & ~0x00000020);
1750         return this;
1751       }
1752 
clone()1753       public Builder clone() {
1754         return create().mergeFrom(buildPartial());
1755       }
1756 
1757       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()1758           getDescriptorForType() {
1759         return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Throttle_descriptor;
1760       }
1761 
getDefaultInstanceForType()1762       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle getDefaultInstanceForType() {
1763         return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
1764       }
1765 
build()1766       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle build() {
1767         org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle result = buildPartial();
1768         if (!result.isInitialized()) {
1769           throw newUninitializedMessageException(result);
1770         }
1771         return result;
1772       }
1773 
buildPartial()1774       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle buildPartial() {
1775         org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle(this);
1776         int from_bitField0_ = bitField0_;
1777         int to_bitField0_ = 0;
1778         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1779           to_bitField0_ |= 0x00000001;
1780         }
1781         if (reqNumBuilder_ == null) {
1782           result.reqNum_ = reqNum_;
1783         } else {
1784           result.reqNum_ = reqNumBuilder_.build();
1785         }
1786         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1787           to_bitField0_ |= 0x00000002;
1788         }
1789         if (reqSizeBuilder_ == null) {
1790           result.reqSize_ = reqSize_;
1791         } else {
1792           result.reqSize_ = reqSizeBuilder_.build();
1793         }
1794         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
1795           to_bitField0_ |= 0x00000004;
1796         }
1797         if (writeNumBuilder_ == null) {
1798           result.writeNum_ = writeNum_;
1799         } else {
1800           result.writeNum_ = writeNumBuilder_.build();
1801         }
1802         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
1803           to_bitField0_ |= 0x00000008;
1804         }
1805         if (writeSizeBuilder_ == null) {
1806           result.writeSize_ = writeSize_;
1807         } else {
1808           result.writeSize_ = writeSizeBuilder_.build();
1809         }
1810         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
1811           to_bitField0_ |= 0x00000010;
1812         }
1813         if (readNumBuilder_ == null) {
1814           result.readNum_ = readNum_;
1815         } else {
1816           result.readNum_ = readNumBuilder_.build();
1817         }
1818         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
1819           to_bitField0_ |= 0x00000020;
1820         }
1821         if (readSizeBuilder_ == null) {
1822           result.readSize_ = readSize_;
1823         } else {
1824           result.readSize_ = readSizeBuilder_.build();
1825         }
1826         result.bitField0_ = to_bitField0_;
1827         onBuilt();
1828         return result;
1829       }
1830 
mergeFrom(com.google.protobuf.Message other)1831       public Builder mergeFrom(com.google.protobuf.Message other) {
1832         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle) {
1833           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle)other);
1834         } else {
1835           super.mergeFrom(other);
1836           return this;
1837         }
1838       }
1839 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle other)1840       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle other) {
1841         if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance()) return this;
1842         if (other.hasReqNum()) {
1843           mergeReqNum(other.getReqNum());
1844         }
1845         if (other.hasReqSize()) {
1846           mergeReqSize(other.getReqSize());
1847         }
1848         if (other.hasWriteNum()) {
1849           mergeWriteNum(other.getWriteNum());
1850         }
1851         if (other.hasWriteSize()) {
1852           mergeWriteSize(other.getWriteSize());
1853         }
1854         if (other.hasReadNum()) {
1855           mergeReadNum(other.getReadNum());
1856         }
1857         if (other.hasReadSize()) {
1858           mergeReadSize(other.getReadSize());
1859         }
1860         this.mergeUnknownFields(other.getUnknownFields());
1861         return this;
1862       }
1863 
isInitialized()1864       public final boolean isInitialized() {
1865         if (hasReqNum()) {
1866           if (!getReqNum().isInitialized()) {
1867 
1868             return false;
1869           }
1870         }
1871         if (hasReqSize()) {
1872           if (!getReqSize().isInitialized()) {
1873 
1874             return false;
1875           }
1876         }
1877         if (hasWriteNum()) {
1878           if (!getWriteNum().isInitialized()) {
1879 
1880             return false;
1881           }
1882         }
1883         if (hasWriteSize()) {
1884           if (!getWriteSize().isInitialized()) {
1885 
1886             return false;
1887           }
1888         }
1889         if (hasReadNum()) {
1890           if (!getReadNum().isInitialized()) {
1891 
1892             return false;
1893           }
1894         }
1895         if (hasReadSize()) {
1896           if (!getReadSize().isInitialized()) {
1897 
1898             return false;
1899           }
1900         }
1901         return true;
1902       }
1903 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1904       public Builder mergeFrom(
1905           com.google.protobuf.CodedInputStream input,
1906           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1907           throws java.io.IOException {
1908         org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle parsedMessage = null;
1909         try {
1910           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1911         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1912           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle) e.getUnfinishedMessage();
1913           throw e;
1914         } finally {
1915           if (parsedMessage != null) {
1916             mergeFrom(parsedMessage);
1917           }
1918         }
1919         return this;
1920       }
1921       private int bitField0_;
1922 
      // optional .TimedQuota req_num = 1;
      // Field state: either the plain value below, or (once a field builder
      // has been created) the nested builder, which then owns the field.
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqNumBuilder_;
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public boolean hasReqNum() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqNum() {
        if (reqNumBuilder_ == null) {
          return reqNum_;
        } else {
          return reqNumBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public Builder setReqNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqNumBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqNum_ = value;
          onChanged();
        } else {
          reqNumBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public Builder setReqNum(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (reqNumBuilder_ == null) {
          reqNum_ = builderForValue.build();
          onChanged();
        } else {
          reqNumBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public Builder mergeReqNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqNumBuilder_ == null) {
          // Merge only when a non-default value is already present (note the
          // reference comparison against the shared default instance);
          // otherwise simply adopt the incoming value.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              reqNum_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            reqNum_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqNum_).mergeFrom(value).buildPartial();
          } else {
            reqNum_ = value;
          }
          onChanged();
        } else {
          reqNumBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public Builder clearReqNum() {
        if (reqNumBuilder_ == null) {
          reqNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          reqNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqNumBuilder() {
        // Marks the field present, since callers typically mutate the builder.
        bitField0_ |= 0x00000001;
        onChanged();
        return getReqNumFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqNumOrBuilder() {
        if (reqNumBuilder_ != null) {
          return reqNumBuilder_.getMessageOrBuilder();
        } else {
          return reqNum_;
        }
      }
      /**
       * <code>optional .TimedQuota req_num = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getReqNumFieldBuilder() {
        // Lazily creates the field builder, seeding it with the current
        // value; the plain field is nulled because ownership transfers.
        if (reqNumBuilder_ == null) {
          reqNumBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  reqNum_,
                  getParentForChildren(),
                  isClean());
          reqNum_ = null;
        }
        return reqNumBuilder_;
      }
2039 
      // optional .TimedQuota req_size = 2;
      // Same generated accessor pattern as req_num above, for presence
      // bit 0x00000002.
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> reqSizeBuilder_;
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public boolean hasReqSize() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReqSize() {
        if (reqSizeBuilder_ == null) {
          return reqSize_;
        } else {
          return reqSizeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public Builder setReqSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqSizeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reqSize_ = value;
          onChanged();
        } else {
          reqSizeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public Builder setReqSize(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (reqSizeBuilder_ == null) {
          reqSize_ = builderForValue.build();
          onChanged();
        } else {
          reqSizeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public Builder mergeReqSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (reqSizeBuilder_ == null) {
          // Merge into an existing non-default value; otherwise adopt.
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              reqSize_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            reqSize_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(reqSize_).mergeFrom(value).buildPartial();
          } else {
            reqSize_ = value;
          }
          onChanged();
        } else {
          reqSizeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public Builder clearReqSize() {
        if (reqSizeBuilder_ == null) {
          reqSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          reqSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getReqSizeBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getReqSizeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReqSizeOrBuilder() {
        if (reqSizeBuilder_ != null) {
          return reqSizeBuilder_.getMessageOrBuilder();
        } else {
          return reqSize_;
        }
      }
      /**
       * <code>optional .TimedQuota req_size = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getReqSizeFieldBuilder() {
        if (reqSizeBuilder_ == null) {
          reqSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  reqSize_,
                  getParentForChildren(),
                  isClean());
          reqSize_ = null;  // ownership transferred to the builder
        }
        return reqSizeBuilder_;
      }
2156 
      // optional .TimedQuota write_num = 3;
      // Same generated accessor pattern as req_num above, for presence
      // bit 0x00000004.
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeNumBuilder_;
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public boolean hasWriteNum() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteNum() {
        if (writeNumBuilder_ == null) {
          return writeNum_;
        } else {
          return writeNumBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public Builder setWriteNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeNumBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          writeNum_ = value;
          onChanged();
        } else {
          writeNumBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public Builder setWriteNum(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (writeNumBuilder_ == null) {
          writeNum_ = builderForValue.build();
          onChanged();
        } else {
          writeNumBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public Builder mergeWriteNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeNumBuilder_ == null) {
          // Merge into an existing non-default value; otherwise adopt.
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              writeNum_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            writeNum_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeNum_).mergeFrom(value).buildPartial();
          } else {
            writeNum_ = value;
          }
          onChanged();
        } else {
          writeNumBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public Builder clearWriteNum() {
        if (writeNumBuilder_ == null) {
          writeNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          writeNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteNumBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getWriteNumFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota write_num = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteNumOrBuilder() {
        if (writeNumBuilder_ != null) {
          return writeNumBuilder_.getMessageOrBuilder();
        } else {
          return writeNum_;
        }
      }
2257       /**
2258        * <code>optional .TimedQuota write_num = 3;</code>
2259        */
2260       private com.google.protobuf.SingleFieldBuilder<
2261           org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
getWriteNumFieldBuilder()2262           getWriteNumFieldBuilder() {
2263         if (writeNumBuilder_ == null) {
2264           writeNumBuilder_ = new com.google.protobuf.SingleFieldBuilder<
2265               org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
2266                   writeNum_,
2267                   getParentForChildren(),
2268                   isClean());
2269           writeNum_ = null;
2270         }
2271         return writeNumBuilder_;
2272       }
2273 
      // optional .TimedQuota write_size = 4;
      // Field storage: either writeSize_ holds the value directly, or
      // writeSizeBuilder_ (once created) owns it and writeSize_ is null.
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> writeSizeBuilder_;
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       *
       * True when the has-bit for write_size (bit 4 of bitField0_) is set.
       */
      public boolean hasWriteSize() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getWriteSize() {
        if (writeSizeBuilder_ == null) {
          return writeSize_;
        } else {
          return writeSizeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       *
       * Replaces write_size with {@code value}; rejects null.
       */
      public Builder setWriteSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeSizeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          writeSize_ = value;
          onChanged();
        } else {
          writeSizeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       */
      public Builder setWriteSize(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (writeSizeBuilder_ == null) {
          writeSize_ = builderForValue.build();
          onChanged();
        } else {
          writeSizeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       *
       * Merges {@code value} into write_size: field-merge when already set to
       * a non-default message, plain replacement otherwise.
       */
      public Builder mergeWriteSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (writeSizeBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008) &&
              writeSize_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            writeSize_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(writeSize_).mergeFrom(value).buildPartial();
          } else {
            writeSize_ = value;
          }
          onChanged();
        } else {
          writeSizeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000008;
        return this;
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       *
       * Clears write_size back to its default instance and clears its has-bit.
       */
      public Builder clearWriteSize() {
        if (writeSizeBuilder_ == null) {
          writeSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          writeSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       *
       * Returns a mutable builder for write_size; marks the field as set.
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getWriteSizeBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getWriteSizeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getWriteSizeOrBuilder() {
        if (writeSizeBuilder_ != null) {
          return writeSizeBuilder_.getMessageOrBuilder();
        } else {
          return writeSize_;
        }
      }
      /**
       * <code>optional .TimedQuota write_size = 4;</code>
       *
       * Lazily creates the SingleFieldBuilder for write_size; ownership of the
       * value transfers to the builder, so the plain field is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getWriteSizeFieldBuilder() {
        if (writeSizeBuilder_ == null) {
          writeSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  writeSize_,
                  getParentForChildren(),
                  isClean());
          writeSize_ = null;
        }
        return writeSizeBuilder_;
      }
2390 
      // optional .TimedQuota read_num = 5;
      // Field storage: either readNum_ holds the value directly, or
      // readNumBuilder_ (once created) owns it and readNum_ is null.
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota readNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readNumBuilder_;
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       *
       * True when the has-bit for read_num (bit 5 of bitField0_) is set.
       */
      public boolean hasReadNum() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadNum() {
        if (readNumBuilder_ == null) {
          return readNum_;
        } else {
          return readNumBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       *
       * Replaces read_num with {@code value}; rejects null.
       */
      public Builder setReadNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readNumBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          readNum_ = value;
          onChanged();
        } else {
          readNumBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       */
      public Builder setReadNum(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (readNumBuilder_ == null) {
          readNum_ = builderForValue.build();
          onChanged();
        } else {
          readNumBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       *
       * Merges {@code value} into read_num: field-merge when already set to a
       * non-default message, plain replacement otherwise.
       */
      public Builder mergeReadNum(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readNumBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              readNum_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            readNum_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readNum_).mergeFrom(value).buildPartial();
          } else {
            readNum_ = value;
          }
          onChanged();
        } else {
          readNumBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       *
       * Clears read_num back to its default instance and clears its has-bit.
       */
      public Builder clearReadNum() {
        if (readNumBuilder_ == null) {
          readNum_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          readNumBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       *
       * Returns a mutable builder for read_num; marks the field as set.
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getReadNumBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getReadNumFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadNumOrBuilder() {
        if (readNumBuilder_ != null) {
          return readNumBuilder_.getMessageOrBuilder();
        } else {
          return readNum_;
        }
      }
      /**
       * <code>optional .TimedQuota read_num = 5;</code>
       *
       * Lazily creates the SingleFieldBuilder for read_num; ownership of the
       * value transfers to the builder, so the plain field is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getReadNumFieldBuilder() {
        if (readNumBuilder_ == null) {
          readNumBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  readNum_,
                  getParentForChildren(),
                  isClean());
          readNum_ = null;
        }
        return readNumBuilder_;
      }
2507 
      // optional .TimedQuota read_size = 6;
      // Field storage: either readSize_ holds the value directly, or
      // readSizeBuilder_ (once created) owns it and readSize_ is null.
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota readSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> readSizeBuilder_;
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       *
       * True when the has-bit for read_size (bit 6 of bitField0_) is set.
       */
      public boolean hasReadSize() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getReadSize() {
        if (readSizeBuilder_ == null) {
          return readSize_;
        } else {
          return readSizeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       *
       * Replaces read_size with {@code value}; rejects null.
       */
      public Builder setReadSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readSizeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          readSize_ = value;
          onChanged();
        } else {
          readSizeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       */
      public Builder setReadSize(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (readSizeBuilder_ == null) {
          readSize_ = builderForValue.build();
          onChanged();
        } else {
          readSizeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       *
       * Merges {@code value} into read_size: field-merge when already set to a
       * non-default message, plain replacement otherwise.
       */
      public Builder mergeReadSize(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (readSizeBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020) &&
              readSize_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            readSize_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(readSize_).mergeFrom(value).buildPartial();
          } else {
            readSize_ = value;
          }
          onChanged();
        } else {
          readSizeBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000020;
        return this;
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       *
       * Clears read_size back to its default instance and clears its has-bit.
       */
      public Builder clearReadSize() {
        if (readSizeBuilder_ == null) {
          readSize_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          readSizeBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       *
       * Returns a mutable builder for read_size; marks the field as set.
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getReadSizeBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getReadSizeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getReadSizeOrBuilder() {
        if (readSizeBuilder_ != null) {
          return readSizeBuilder_.getMessageOrBuilder();
        } else {
          return readSize_;
        }
      }
      /**
       * <code>optional .TimedQuota read_size = 6;</code>
       *
       * Lazily creates the SingleFieldBuilder for read_size; ownership of the
       * value transfers to the builder, so the plain field is nulled out.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getReadSizeFieldBuilder() {
        if (readSizeBuilder_ == null) {
          readSizeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  readSize_,
                  getParentForChildren(),
                  isClean());
          readSize_ = null;
        }
        return readSizeBuilder_;
      }
2624 
2625       // @@protoc_insertion_point(builder_scope:Throttle)
2626     }
2627 
    // Eagerly build the shared Throttle default instance; initFields() fills
    // every field with its proto-declared default.
    static {
      defaultInstance = new Throttle(true);
      defaultInstance.initFields();
    }
2632 
2633     // @@protoc_insertion_point(class_scope:Throttle)
2634   }
2635 
  /**
   * Read-only accessor interface for {@code ThrottleRequest}, implemented by
   * both the immutable message and its Builder.
   */
  public interface ThrottleRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .ThrottleType type = 1;
    /**
     * <code>optional .ThrottleType type = 1;</code>
     */
    boolean hasType();
    /**
     * <code>optional .ThrottleType type = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType getType();

    // optional .TimedQuota timed_quota = 2;
    /**
     * <code>optional .TimedQuota timed_quota = 2;</code>
     */
    boolean hasTimedQuota();
    /**
     * <code>optional .TimedQuota timed_quota = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota();
    /**
     * <code>optional .TimedQuota timed_quota = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder();
  }
2663   /**
2664    * Protobuf type {@code ThrottleRequest}
2665    */
2666   public static final class ThrottleRequest extends
2667       com.google.protobuf.GeneratedMessage
2668       implements ThrottleRequestOrBuilder {
2669     // Use ThrottleRequest.newBuilder() to construct.
    // Use ThrottleRequest.newBuilder() to construct.
    // Copies the builder's unknown-field set into the new message.
    private ThrottleRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the static defaultInstance; skips field initialization.
    private ThrottleRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2675 
    // Shared immutable default instance, created in the static initializer.
    private static final ThrottleRequest defaultInstance;
    public static ThrottleRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ThrottleRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields from the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a ThrottleRequest directly from the wire format, collecting
     * unrecognized fields into {@code unknownFields}.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on malformed
     *         input; the partially parsed message is attached to the exception.
     */
    private ThrottleRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (type), varint-encoded enum. Unrecognized enum
              // numbers are preserved as unknown fields rather than dropped.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType value = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                type_ = value;
              }
              break;
            }
            case 18: {
              // Field 2 (timed_quota), length-delimited message. If already
              // set (repeated occurrence), merge into the existing value.
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = timedQuota_.toBuilder();
              }
              timedQuota_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timedQuota_);
                timedQuota_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize unknown fields, even on error paths.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / reflection plumbing for ThrottleRequest.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_ThrottleRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_ThrottleRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder.class);
    }
2761 
    // NOTE(review): PARSER is public, static and non-final as emitted by this
    // protoc version — callers could reassign it. Later protobuf generators
    // hide it behind parser(); do not hand-fix in generated code.
    public static com.google.protobuf.Parser<ThrottleRequest> PARSER =
        new com.google.protobuf.AbstractParser<ThrottleRequest>() {
      public ThrottleRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ThrottleRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ThrottleRequest> getParserForType() {
      return PARSER;
    }
2776 
    // Has-bits for the two optional fields: 0x1 = type, 0x2 = timed_quota.
    private int bitField0_;
    // optional .ThrottleType type = 1;
    public static final int TYPE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType type_;
    /**
     * <code>optional .ThrottleType type = 1;</code>
     */
    public boolean hasType() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .ThrottleType type = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType getType() {
      return type_;
    }

    // optional .TimedQuota timed_quota = 2;
    public static final int TIMED_QUOTA_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota timedQuota_;
    /**
     * <code>optional .TimedQuota timed_quota = 2;</code>
     */
    public boolean hasTimedQuota() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .TimedQuota timed_quota = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota() {
      return timedQuota_;
    }
    /**
     * <code>optional .TimedQuota timed_quota = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder() {
      return timedQuota_;
    }
2815 
    // Resets every field to its proto-declared default.
    private void initFields() {
      type_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER;
      timedQuota_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The message itself has no required fields, but a present timed_quota
      // must be fully initialized for this message to be.
      if (hasTimedQuota()) {
        if (!getTimedQuota().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
2834 
    // Serializes only the fields whose has-bit is set, then any unknowns.
    // getSerializedSize() is called first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, type_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, timedQuota_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, type_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, timedQuota_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2865 
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
2872 
    // Value equality: same field presence, equal field values, and equal
    // unknown-field sets.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest) obj;

      boolean result = true;
      result = result && (hasType() == other.hasType());
      if (hasType()) {
        result = result &&
            (getType() == other.getType());
      }
      result = result && (hasTimedQuota() == other.hasTimedQuota());
      if (hasTimedQuota()) {
        result = result && getTimedQuota()
            .equals(other.getTimedQuota());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
2898 
    // Cached hash; 0 means "not yet computed" (a computed hash of exactly 0
    // is simply recomputed each call — harmless).
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes the descriptor, each present
     * field (tagged by its field number), and the unknown fields, using the
     * standard protobuf-generated 19/37/53/29 multiplier scheme.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getType());
      }
      if (hasTimedQuota()) {
        hash = (37 * hash) + TIMED_QUOTA_FIELD_NUMBER;
        hash = (53 * hash) + getTimedQuota().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
2919 
    // ---------------------------------------------------------------------
    // Static parse entry points. All overloads delegate to PARSER; the
    // byte[]/ByteString variants throw InvalidProtocolBufferException on
    // malformed input, the stream variants additionally propagate I/O
    // errors. The "Delimited" variants expect a varint length prefix.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Reads one length-prefixed message from the stream.
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2972 
    /** Creates a new, empty builder for ThrottleRequest. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with a copy of {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    /**
     * Framework hook: creates a builder attached to {@code parent} so that
     * nested-builder change notifications propagate upward.
     */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ThrottleRequest}
     *
     * <p>Mutable builder for ThrottleRequest. Field presence is tracked in
     * {@code bitField0_} (bit 0 = type, bit 1 = timed_quota). The message
     * field {@code timed_quota} is held either directly in {@code timedQuota_}
     * or, once a nested builder has been requested, in
     * {@code timedQuotaBuilder_} — exactly one of the two is authoritative at
     * any time.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_ThrottleRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_ThrottleRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested field builders when the runtime is configured
      // to always use field builders (descriptor-based reflection mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTimedQuotaFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        type_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (timedQuotaBuilder_ == null) {
          timedQuota_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
        } else {
          timedQuotaBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_ThrottleRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException if any
       * required sub-field (inside timed_quota) is missing.
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Builds without the initialization check, copying presence bits from
       * the builder's bitField0_ into the message's.
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.type_ = type_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (timedQuotaBuilder_ == null) {
          result.timedQuota_ = timedQuota_;
        } else {
          result.timedQuota_ = timedQuotaBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges set fields from {@code other}; a default instance is a no-op. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest.getDefaultInstance()) return this;
        if (other.hasType()) {
          setType(other.getType());
        }
        if (other.hasTimedQuota()) {
          mergeTimedQuota(other.getTimedQuota());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** True unless a present timed_quota is itself uninitialized. */
      public final boolean isInitialized() {
        if (hasTimedQuota()) {
          if (!getTimedQuota().isInitialized()) {

            return false;
          }
        }
        return true;
      }

      /**
       * Parses from a stream and merges the result into this builder.
       * On a parse error, any partially-parsed message is still merged
       * (in the finally block) before the exception is rethrown.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits: bit 0 = type, bit 1 = timed_quota.
      private int bitField0_;

      // optional .ThrottleType type = 1;
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType type_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER;
      /**
       * <code>optional .ThrottleType type = 1;</code>
       */
      public boolean hasType() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .ThrottleType type = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType getType() {
        return type_;
      }
      /**
       * <code>optional .ThrottleType type = 1;</code>
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setType(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        type_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .ThrottleType type = 1;</code>
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        type_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleType.REQUEST_NUMBER;
        onChanged();
        return this;
      }

      // optional .TimedQuota timed_quota = 2;
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota timedQuota_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder> timedQuotaBuilder_;
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       */
      public boolean hasTimedQuota() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota getTimedQuota() {
        if (timedQuotaBuilder_ == null) {
          return timedQuota_;
        } else {
          return timedQuotaBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       *
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setTimedQuota(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (timedQuotaBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          timedQuota_ = value;
          onChanged();
        } else {
          timedQuotaBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       */
      public Builder setTimedQuota(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder builderForValue) {
        if (timedQuotaBuilder_ == null) {
          timedQuota_ = builderForValue.build();
          onChanged();
        } else {
          timedQuotaBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       *
       * <p>If a non-default value is already present, the two messages are
       * merged field-by-field; otherwise {@code value} replaces it.
       */
      public Builder mergeTimedQuota(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota value) {
        if (timedQuotaBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              timedQuota_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance()) {
            timedQuota_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.newBuilder(timedQuota_).mergeFrom(value).buildPartial();
          } else {
            timedQuota_ = value;
          }
          onChanged();
        } else {
          timedQuotaBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       */
      public Builder clearTimedQuota() {
        if (timedQuotaBuilder_ == null) {
          timedQuota_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.getDefaultInstance();
          onChanged();
        } else {
          timedQuotaBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       *
       * <p>Marks the field present and returns a mutable nested builder.
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder getTimedQuotaBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getTimedQuotaFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder getTimedQuotaOrBuilder() {
        if (timedQuotaBuilder_ != null) {
          return timedQuotaBuilder_.getMessageOrBuilder();
        } else {
          return timedQuota_;
        }
      }
      /**
       * <code>optional .TimedQuota timed_quota = 2;</code>
       *
       * <p>Lazily creates the nested field builder; once created it takes
       * ownership of the current value and {@code timedQuota_} is nulled.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>
          getTimedQuotaFieldBuilder() {
        if (timedQuotaBuilder_ == null) {
          timedQuotaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuota.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.TimedQuotaOrBuilder>(
                  timedQuota_,
                  getParentForChildren(),
                  isClean());
          timedQuota_ = null;
        }
        return timedQuotaBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:ThrottleRequest)
    }
3283 
    // Eagerly create and initialize the shared default (empty) instance.
    static {
      defaultInstance = new ThrottleRequest(true);
      defaultInstance.initFields();
    }
3288 
3289     // @@protoc_insertion_point(class_scope:ThrottleRequest)
3290   }
3291 
  /**
   * Read-only view of a {@code Quotas} message, implemented by both the
   * immutable message and its builder.
   */
  public interface QuotasOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bool bypass_globals = 1 [default = false];
    /**
     * <code>optional bool bypass_globals = 1 [default = false];</code>
     */
    boolean hasBypassGlobals();
    /**
     * <code>optional bool bypass_globals = 1 [default = false];</code>
     */
    boolean getBypassGlobals();

    // optional .Throttle throttle = 2;
    /**
     * <code>optional .Throttle throttle = 2;</code>
     */
    boolean hasThrottle();
    /**
     * <code>optional .Throttle throttle = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle getThrottle();
    /**
     * <code>optional .Throttle throttle = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder();
  }
3319   /**
3320    * Protobuf type {@code Quotas}
3321    */
3322   public static final class Quotas extends
3323       com.google.protobuf.GeneratedMessage
3324       implements QuotasOrBuilder {
3325     // Use Quotas.newBuilder() to construct.
    // Use Quotas.newBuilder() to construct.
    private Quotas(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the shared default instance; fields stay at defaults.
    private Quotas(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3331 
    // Shared immutable default (empty) instance, created in the static initializer.
    private static final Quotas defaultInstance;
    public static Quotas getDefaultInstance() {
      return defaultInstance;
    }

    public Quotas getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time, preserved for reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parsing constructor: reads tag/value pairs from the stream until EOF
     * (tag 0) or an unrecognized end-group tag. Unknown fields are collected
     * and attached; on error the partially-built message is attached to the
     * thrown InvalidProtocolBufferException.
     */
    private Quotas(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream.
              done = true;
              break;
            default: {
              // Unknown tag: preserve it, or stop on an end-group marker.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (bool bypass_globals), wire type 0 (varint).
              bitField0_ |= 0x00000001;
              bypassGlobals_ = input.readBool();
              break;
            }
            case 18: {
              // Field 2 (Throttle throttle), wire type 2 (length-delimited).
              // If the field already appeared, merge the repeats together.
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = throttle_.toBuilder();
              }
              throttle_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(throttle_);
                throttle_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Descriptor for the {@code Quotas} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Quotas_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Quotas_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.Builder.class);
    }

    // Stateless parser delegating to the parsing constructor.
    public static com.google.protobuf.Parser<Quotas> PARSER =
        new com.google.protobuf.AbstractParser<Quotas>() {
      public Quotas parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Quotas(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Quotas> getParserForType() {
      return PARSER;
    }
3426 
    // Presence bits: bit 0 = bypass_globals, bit 1 = throttle.
    private int bitField0_;
    // optional bool bypass_globals = 1 [default = false];
    public static final int BYPASS_GLOBALS_FIELD_NUMBER = 1;
    private boolean bypassGlobals_;
    /**
     * <code>optional bool bypass_globals = 1 [default = false];</code>
     */
    public boolean hasBypassGlobals() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bool bypass_globals = 1 [default = false];</code>
     */
    public boolean getBypassGlobals() {
      return bypassGlobals_;
    }

    // optional .Throttle throttle = 2;
    public static final int THROTTLE_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle throttle_;
    /**
     * <code>optional .Throttle throttle = 2;</code>
     */
    public boolean hasThrottle() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .Throttle throttle = 2;</code>
     *
     * <p>Returns the default Throttle instance when the field is unset.
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle getThrottle() {
      return throttle_;
    }
    /**
     * <code>optional .Throttle throttle = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder() {
      return throttle_;
    }
3465 
    // Sets every field to its proto-declared default.
    private void initFields() {
      bypassGlobals_ = false;
      throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    /** True unless a present throttle sub-message is itself uninitialized. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (hasThrottle()) {
        if (!getThrottle().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
3484 
    /**
     * Serializes the set fields (and unknown fields) to {@code output} in
     * field-number order. getSerializedSize() is invoked first so cached
     * nested sizes are populated before writing.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBool(1, bypassGlobals_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, throttle_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    /** Returns (and memoizes) the serialized size of this message in bytes. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(1, bypassGlobals_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, throttle_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
3515 
    private static final long serialVersionUID = 0L;
    /**
     * Java-serialization hook; delegates to GeneratedMessage.writeReplace(),
     * which serializes via the protobuf wire format.
     */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
3522 
    /**
     * Field-by-field equality: matching presence and values for both
     * optional fields, plus matching unknown-field sets.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas) obj;

      boolean result = true;
      result = result && (hasBypassGlobals() == other.hasBypassGlobals());
      if (hasBypassGlobals()) {
        result = result && (getBypassGlobals()
            == other.getBypassGlobals());
      }
      result = result && (hasThrottle() == other.hasThrottle());
      if (hasThrottle()) {
        result = result && getThrottle()
            .equals(other.getThrottle());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
3548 
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /**
     * Hash consistent with equals(): mixes the descriptor, each present
     * field (tagged by field number), and the unknown fields, using the
     * standard protobuf-generated 19/37/53/29 multiplier scheme.
     */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasBypassGlobals()) {
        hash = (37 * hash) + BYPASS_GLOBALS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getBypassGlobals());
      }
      if (hasThrottle()) {
        hash = (37 * hash) + THROTTLE_FIELD_NUMBER;
        hash = (53 * hash) + getThrottle().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
3569 
    // Static parse entry points. All overloads delegate to PARSER; the
    // InputStream variants throw IOException, the in-memory variants throw
    // InvalidProtocolBufferException on malformed input.
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length prefix before the body.
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
3622 
    // Builder factories: a fresh empty builder, a builder pre-populated from a
    // prototype message, and the reflection hook used by the runtime to create
    // a child builder attached to a parent (for nested-builder change
    // notification).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code Quotas}
     *
     * Mutable builder for {@code Quotas}. Field presence is tracked in
     * {@code bitField0_} (bit 0 = bypass_globals, bit 1 = throttle); the
     * nested {@code throttle} field may be held either as a plain message in
     * {@code throttle_} or behind a {@code SingleFieldBuilder} once one is
     * created.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotasOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Quotas_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Quotas_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly constructs nested-field builders when the runtime requires it
      // (true whenever the builder has a parent to notify of changes).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getThrottleFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all fields to their defaults and clears both has-bits.
      public Builder clear() {
        super.clear();
        bypassGlobals_ = false;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (throttleBuilder_ == null) {
          throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
        } else {
          throttleBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_Quotas_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields
      // (required fields may live in the nested Throttle message).
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas build() {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Snapshots the builder state into a new immutable Quotas, copying the
      // has-bits and taking the throttle either from the raw field or from its
      // field builder, whichever is active.
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.bypassGlobals_ = bypassGlobals_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (throttleBuilder_ == null) {
          result.throttle_ = throttle_;
        } else {
          result.throttle_ = throttleBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Generic merge: dispatch to the typed overload when possible, otherwise
      // fall back to the reflection-based merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields *present* in 'other' overwrite/merge
      // into this builder; merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas.getDefaultInstance()) return this;
        if (other.hasBypassGlobals()) {
          setBypassGlobals(other.getBypassGlobals());
        }
        if (other.hasThrottle()) {
          mergeThrottle(other.getThrottle());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Quotas itself has no required fields; initialization only depends on
      // the nested throttle message when it is set.
      public final boolean isInitialized() {
        if (hasThrottle()) {
          if (!getThrottle().isInitialized()) {

            return false;
          }
        }
        return true;
      }

      // Wire-format merge: parse a full message then merge it in. On a parse
      // error, the partially parsed message (if any) is still merged before
      // the exception propagates, per protobuf merge semantics.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional bool bypass_globals = 1 [default = false];
      private boolean bypassGlobals_ ;
      /**
       * <code>optional bool bypass_globals = 1 [default = false];</code>
       */
      public boolean hasBypassGlobals() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bool bypass_globals = 1 [default = false];</code>
       */
      public boolean getBypassGlobals() {
        return bypassGlobals_;
      }
      /**
       * <code>optional bool bypass_globals = 1 [default = false];</code>
       */
      public Builder setBypassGlobals(boolean value) {
        bitField0_ |= 0x00000001;
        bypassGlobals_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool bypass_globals = 1 [default = false];</code>
       */
      public Builder clearBypassGlobals() {
        bitField0_ = (bitField0_ & ~0x00000001);
        bypassGlobals_ = false;
        onChanged();
        return this;
      }

      // optional .Throttle throttle = 2;
      private org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder> throttleBuilder_;
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public boolean hasThrottle() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle getThrottle() {
        if (throttleBuilder_ == null) {
          return throttle_;
        } else {
          return throttleBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public Builder setThrottle(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle value) {
        if (throttleBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          throttle_ = value;
          onChanged();
        } else {
          throttleBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public Builder setThrottle(
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder builderForValue) {
        if (throttleBuilder_ == null) {
          throttle_ = builderForValue.build();
          onChanged();
        } else {
          throttleBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       *
       * Merges (rather than replaces) when a non-default throttle is already
       * present, per protobuf singular-message merge semantics.
       */
      public Builder mergeThrottle(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle value) {
        if (throttleBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              throttle_ != org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance()) {
            throttle_ =
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.newBuilder(throttle_).mergeFrom(value).buildPartial();
          } else {
            throttle_ = value;
          }
          onChanged();
        } else {
          throttleBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public Builder clearThrottle() {
        if (throttleBuilder_ == null) {
          throttle_ = org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.getDefaultInstance();
          onChanged();
        } else {
          throttleBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder getThrottleBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getThrottleFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder getThrottleOrBuilder() {
        if (throttleBuilder_ != null) {
          return throttleBuilder_.getMessageOrBuilder();
        } else {
          return throttle_;
        }
      }
      /**
       * <code>optional .Throttle throttle = 2;</code>
       *
       * Lazily creates the SingleFieldBuilder; once created, the raw
       * throttle_ reference is nulled and all access goes through the builder.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder>
          getThrottleFieldBuilder() {
        if (throttleBuilder_ == null) {
          throttleBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Throttle.Builder, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.ThrottleOrBuilder>(
                  throttle_,
                  getParentForChildren(),
                  isClean());
          throttle_ = null;
        }
        return throttleBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:Quotas)
    }
3930 
    static {
      // Eagerly build the shared default (empty) instance used by
      // getDefaultInstance() and the parser.
      defaultInstance = new Quotas(true);
      defaultInstance.initFields();
    }
3935 
3936     // @@protoc_insertion_point(class_scope:Quotas)
3937   }
3938 
  // Accessor interface for QuotaUsage. The message declares no fields, so
  // only the base MessageOrBuilder surface is inherited.
  public interface QuotaUsageOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
3942   /**
3943    * Protobuf type {@code QuotaUsage}
3944    */
3945   public static final class QuotaUsage extends
3946       com.google.protobuf.GeneratedMessage
3947       implements QuotaUsageOrBuilder {
3948     // Use QuotaUsage.newBuilder() to construct.
    // Use QuotaUsage.newBuilder() to construct.
    private QuotaUsage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used only to create the shared default instance.
    private QuotaUsage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final QuotaUsage defaultInstance;
    public static QuotaUsage getDefaultInstance() {
      return defaultInstance;
    }

    public QuotaUsage getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that were on the wire but not in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. QuotaUsage declares no fields, so every
    // non-zero tag is routed to the unknown-field set; tag 0 marks end of
    // input. On error the partially built message is attached to the
    // exception for merge semantics.
    private QuotaUsage(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures so callers see a single exception type.
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Whatever was collected (even on failure) becomes the unknown set.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support: the descriptor and field-accessor table registered
    // for QuotaUsage in this file's descriptor pool.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_QuotaUsage_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_QuotaUsage_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.Builder.class);
    }

    // Shared stateless parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<QuotaUsage> PARSER =
        new com.google.protobuf.AbstractParser<QuotaUsage>() {
      public QuotaUsage parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new QuotaUsage(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<QuotaUsage> getParserForType() {
      return PARSER;
    }
4030 
    // No declared fields, so there is nothing to initialize.
    private void initFields() {
    }
    // Memoized initialization check: -1 = unknown, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serialization writes only the unknown fields (the schema is empty).
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size: -1 = not computed yet.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    // Java-serialization hook: substitute GeneratedMessage's serialized proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
4065 
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage other = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage) obj;

      // With no declared fields, equality reduces to the unknown-field sets.
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash value; 0 means "not computed yet".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
4094 
    // Static parse entry points; all overloads delegate to PARSER. Delimited
    // variants read a leading varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
4147 
    // Builder factories: empty builder, prototype-seeded builder, and the
    // runtime hook that attaches a new builder to a parent for change
    // notification.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
4161     /**
4162      * Protobuf type {@code QuotaUsage}
4163      */
4164     public static final class Builder extends
4165         com.google.protobuf.GeneratedMessage.Builder<Builder>
4166        implements org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsageOrBuilder {
      // Reflection support: descriptor and accessor table for QuotaUsage.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_QuotaUsage_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_QuotaUsage_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.class, org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so there are no field builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
4196 
clear()4197       public Builder clear() {
4198         super.clear();
4199         return this;
4200       }
4201 
clone()4202       public Builder clone() {
4203         return create().mergeFrom(buildPartial());
4204       }
4205 
4206       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()4207           getDescriptorForType() {
4208         return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.internal_static_QuotaUsage_descriptor;
4209       }
4210 
getDefaultInstanceForType()4211       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage getDefaultInstanceForType() {
4212         return org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.getDefaultInstance();
4213       }
4214 
build()4215       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage build() {
4216         org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage result = buildPartial();
4217         if (!result.isInitialized()) {
4218           throw newUninitializedMessageException(result);
4219         }
4220         return result;
4221       }
4222 
buildPartial()4223       public org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage buildPartial() {
4224         org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage result = new org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage(this);
4225         onBuilt();
4226         return result;
4227       }
4228 
mergeFrom(com.google.protobuf.Message other)4229       public Builder mergeFrom(com.google.protobuf.Message other) {
4230         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage) {
4231           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage)other);
4232         } else {
4233           super.mergeFrom(other);
4234           return this;
4235         }
4236       }
4237 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage other)4238       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage other) {
4239         if (other == org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage.getDefaultInstance()) return this;
4240         this.mergeUnknownFields(other.getUnknownFields());
4241         return this;
4242       }
4243 
isInitialized()4244       public final boolean isInitialized() {
4245         return true;
4246       }
4247 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4248       public Builder mergeFrom(
4249           com.google.protobuf.CodedInputStream input,
4250           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4251           throws java.io.IOException {
4252         org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage parsedMessage = null;
4253         try {
4254           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4255         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4256           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.QuotaUsage) e.getUnfinishedMessage();
4257           throw e;
4258         } finally {
4259           if (parsedMessage != null) {
4260             mergeFrom(parsedMessage);
4261           }
4262         }
4263         return this;
4264       }
4265 
4266       // @@protoc_insertion_point(builder_scope:QuotaUsage)
4267     }
4268 
    // Class initializer: creates and initializes the shared default instance
    // returned by getDefaultInstance()/getDefaultInstanceForType().
    static {
      defaultInstance = new QuotaUsage(true);
      defaultInstance.initFields();
    }
4273 
4274     // @@protoc_insertion_point(class_scope:QuotaUsage)
4275   }
4276 
  // Descriptor / reflective field-accessor-table pairs for every message type
  // defined in Quota.proto (TimedQuota, Throttle, ThrottleRequest, Quotas,
  // QuotaUsage). All of these are populated exactly once by the
  // InternalDescriptorAssigner that runs when the file descriptor is built.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TimedQuota_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TimedQuota_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Throttle_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Throttle_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ThrottleRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ThrottleRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Quotas_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Quotas_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_QuotaUsage_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_QuotaUsage_fieldAccessorTable;
4302 
  /** Returns the file descriptor for {@code Quota.proto}. */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for Quota.proto, embedded by protoc as
    // escaped byte strings. Must not be edited by hand; regenerate from the
    // .proto file instead.
    java.lang.String[] descriptorData = {
      "\n\013Quota.proto\032\013HBase.proto\"r\n\nTimedQuota" +
      "\022\034\n\ttime_unit\030\001 \002(\0162\t.TimeUnit\022\022\n\nsoft_l" +
      "imit\030\002 \001(\004\022\r\n\005share\030\003 \001(\002\022#\n\005scope\030\004 \001(\016" +
      "2\013.QuotaScope:\007MACHINE\"\307\001\n\010Throttle\022\034\n\007r" +
      "eq_num\030\001 \001(\0132\013.TimedQuota\022\035\n\010req_size\030\002 " +
      "\001(\0132\013.TimedQuota\022\036\n\twrite_num\030\003 \001(\0132\013.Ti" +
      "medQuota\022\037\n\nwrite_size\030\004 \001(\0132\013.TimedQuot" +
      "a\022\035\n\010read_num\030\005 \001(\0132\013.TimedQuota\022\036\n\tread" +
      "_size\030\006 \001(\0132\013.TimedQuota\"P\n\017ThrottleRequ" +
      "est\022\033\n\004type\030\001 \001(\0162\r.ThrottleType\022 \n\013time",
      "d_quota\030\002 \001(\0132\013.TimedQuota\"D\n\006Quotas\022\035\n\016" +
      "bypass_globals\030\001 \001(\010:\005false\022\033\n\010throttle\030" +
      "\002 \001(\0132\t.Throttle\"\014\n\nQuotaUsage*&\n\nQuotaS" +
      "cope\022\013\n\007CLUSTER\020\001\022\013\n\007MACHINE\020\002*v\n\014Thrott" +
      "leType\022\022\n\016REQUEST_NUMBER\020\001\022\020\n\014REQUEST_SI" +
      "ZE\020\002\022\020\n\014WRITE_NUMBER\020\003\022\016\n\nWRITE_SIZE\020\004\022\017" +
      "\n\013READ_NUMBER\020\005\022\r\n\tREAD_SIZE\020\006*\031\n\tQuotaT" +
      "ype\022\014\n\010THROTTLE\020\001BA\n*org.apache.hadoop.h" +
      "base.protobuf.generatedB\013QuotaProtosH\001\210\001" +
      "\001\240\001\001"
    };
    // Callback invoked once the file descriptor has been parsed: captures the
    // root descriptor and wires up each message's descriptor and reflective
    // field-accessor table. Returns null because this file registers no
    // extensions.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_TimedQuota_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_TimedQuota_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TimedQuota_descriptor,
              new java.lang.String[] { "TimeUnit", "SoftLimit", "Share", "Scope", });
          internal_static_Throttle_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_Throttle_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Throttle_descriptor,
              new java.lang.String[] { "ReqNum", "ReqSize", "WriteNum", "WriteSize", "ReadNum", "ReadSize", });
          internal_static_ThrottleRequest_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_ThrottleRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ThrottleRequest_descriptor,
              new java.lang.String[] { "Type", "TimedQuota", });
          internal_static_Quotas_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_Quotas_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Quotas_descriptor,
              new java.lang.String[] { "BypassGlobals", "Throttle", });
          internal_static_QuotaUsage_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_QuotaUsage_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_QuotaUsage_descriptor,
              new java.lang.String[] { });
          return null;
        }
      };
    // Build the file descriptor; Quota.proto imports HBase.proto, so that
    // file's descriptor is supplied as a dependency.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
        }, assigner);
  }
4376 
4377   // @@protoc_insertion_point(outer_class_scope)
4378 }
4379