1 // Generated by the protocol buffer compiler.  DO NOT EDIT!
2 // source: Filter.proto
3 
4 package org.apache.hadoop.hbase.protobuf.generated;
5 
6 public final class FilterProtos {
FilterProtos()7   private FilterProtos() {}
registerAllExtensions( com.google.protobuf.ExtensionRegistry registry)8   public static void registerAllExtensions(
9       com.google.protobuf.ExtensionRegistry registry) {
10   }
11   public interface FilterOrBuilder
12       extends com.google.protobuf.MessageOrBuilder {
13 
14     // required string name = 1;
15     /**
16      * <code>required string name = 1;</code>
17      */
hasName()18     boolean hasName();
19     /**
20      * <code>required string name = 1;</code>
21      */
getName()22     java.lang.String getName();
23     /**
24      * <code>required string name = 1;</code>
25      */
26     com.google.protobuf.ByteString
getNameBytes()27         getNameBytes();
28 
29     // optional bytes serialized_filter = 2;
30     /**
31      * <code>optional bytes serialized_filter = 2;</code>
32      */
hasSerializedFilter()33     boolean hasSerializedFilter();
34     /**
35      * <code>optional bytes serialized_filter = 2;</code>
36      */
getSerializedFilter()37     com.google.protobuf.ByteString getSerializedFilter();
38   }
39   /**
40    * Protobuf type {@code Filter}
41    */
42   public static final class Filter extends
43       com.google.protobuf.GeneratedMessage
44       implements FilterOrBuilder {
45     // Use Filter.newBuilder() to construct.
Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder)46     private Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
47       super(builder);
48       this.unknownFields = builder.getUnknownFields();
49     }
Filter(boolean noInit)50     private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
51 
52     private static final Filter defaultInstance;
getDefaultInstance()53     public static Filter getDefaultInstance() {
54       return defaultInstance;
55     }
56 
getDefaultInstanceForType()57     public Filter getDefaultInstanceForType() {
58       return defaultInstance;
59     }
60 
61     private final com.google.protobuf.UnknownFieldSet unknownFields;
62     @java.lang.Override
63     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()64         getUnknownFields() {
65       return this.unknownFields;
66     }
Filter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)67     private Filter(
68         com.google.protobuf.CodedInputStream input,
69         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
70         throws com.google.protobuf.InvalidProtocolBufferException {
71       initFields();
72       int mutable_bitField0_ = 0;
73       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
74           com.google.protobuf.UnknownFieldSet.newBuilder();
75       try {
76         boolean done = false;
77         while (!done) {
78           int tag = input.readTag();
79           switch (tag) {
80             case 0:
81               done = true;
82               break;
83             default: {
84               if (!parseUnknownField(input, unknownFields,
85                                      extensionRegistry, tag)) {
86                 done = true;
87               }
88               break;
89             }
90             case 10: {
91               bitField0_ |= 0x00000001;
92               name_ = input.readBytes();
93               break;
94             }
95             case 18: {
96               bitField0_ |= 0x00000002;
97               serializedFilter_ = input.readBytes();
98               break;
99             }
100           }
101         }
102       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
103         throw e.setUnfinishedMessage(this);
104       } catch (java.io.IOException e) {
105         throw new com.google.protobuf.InvalidProtocolBufferException(
106             e.getMessage()).setUnfinishedMessage(this);
107       } finally {
108         this.unknownFields = unknownFields.build();
109         makeExtensionsImmutable();
110       }
111     }
112     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()113         getDescriptor() {
114       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor;
115     }
116 
117     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()118         internalGetFieldAccessorTable() {
119       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable
120           .ensureFieldAccessorsInitialized(
121               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class);
122     }
123 
124     public static com.google.protobuf.Parser<Filter> PARSER =
125         new com.google.protobuf.AbstractParser<Filter>() {
126       public Filter parsePartialFrom(
127           com.google.protobuf.CodedInputStream input,
128           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
129           throws com.google.protobuf.InvalidProtocolBufferException {
130         return new Filter(input, extensionRegistry);
131       }
132     };
133 
134     @java.lang.Override
getParserForType()135     public com.google.protobuf.Parser<Filter> getParserForType() {
136       return PARSER;
137     }
138 
139     private int bitField0_;
140     // required string name = 1;
141     public static final int NAME_FIELD_NUMBER = 1;
142     private java.lang.Object name_;
143     /**
144      * <code>required string name = 1;</code>
145      */
hasName()146     public boolean hasName() {
147       return ((bitField0_ & 0x00000001) == 0x00000001);
148     }
149     /**
150      * <code>required string name = 1;</code>
151      */
getName()152     public java.lang.String getName() {
153       java.lang.Object ref = name_;
154       if (ref instanceof java.lang.String) {
155         return (java.lang.String) ref;
156       } else {
157         com.google.protobuf.ByteString bs =
158             (com.google.protobuf.ByteString) ref;
159         java.lang.String s = bs.toStringUtf8();
160         if (bs.isValidUtf8()) {
161           name_ = s;
162         }
163         return s;
164       }
165     }
166     /**
167      * <code>required string name = 1;</code>
168      */
169     public com.google.protobuf.ByteString
getNameBytes()170         getNameBytes() {
171       java.lang.Object ref = name_;
172       if (ref instanceof java.lang.String) {
173         com.google.protobuf.ByteString b =
174             com.google.protobuf.ByteString.copyFromUtf8(
175                 (java.lang.String) ref);
176         name_ = b;
177         return b;
178       } else {
179         return (com.google.protobuf.ByteString) ref;
180       }
181     }
182 
183     // optional bytes serialized_filter = 2;
184     public static final int SERIALIZED_FILTER_FIELD_NUMBER = 2;
185     private com.google.protobuf.ByteString serializedFilter_;
186     /**
187      * <code>optional bytes serialized_filter = 2;</code>
188      */
hasSerializedFilter()189     public boolean hasSerializedFilter() {
190       return ((bitField0_ & 0x00000002) == 0x00000002);
191     }
192     /**
193      * <code>optional bytes serialized_filter = 2;</code>
194      */
getSerializedFilter()195     public com.google.protobuf.ByteString getSerializedFilter() {
196       return serializedFilter_;
197     }
198 
initFields()199     private void initFields() {
200       name_ = "";
201       serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
202     }
203     private byte memoizedIsInitialized = -1;
isInitialized()204     public final boolean isInitialized() {
205       byte isInitialized = memoizedIsInitialized;
206       if (isInitialized != -1) return isInitialized == 1;
207 
208       if (!hasName()) {
209         memoizedIsInitialized = 0;
210         return false;
211       }
212       memoizedIsInitialized = 1;
213       return true;
214     }
215 
writeTo(com.google.protobuf.CodedOutputStream output)216     public void writeTo(com.google.protobuf.CodedOutputStream output)
217                         throws java.io.IOException {
218       getSerializedSize();
219       if (((bitField0_ & 0x00000001) == 0x00000001)) {
220         output.writeBytes(1, getNameBytes());
221       }
222       if (((bitField0_ & 0x00000002) == 0x00000002)) {
223         output.writeBytes(2, serializedFilter_);
224       }
225       getUnknownFields().writeTo(output);
226     }
227 
228     private int memoizedSerializedSize = -1;
getSerializedSize()229     public int getSerializedSize() {
230       int size = memoizedSerializedSize;
231       if (size != -1) return size;
232 
233       size = 0;
234       if (((bitField0_ & 0x00000001) == 0x00000001)) {
235         size += com.google.protobuf.CodedOutputStream
236           .computeBytesSize(1, getNameBytes());
237       }
238       if (((bitField0_ & 0x00000002) == 0x00000002)) {
239         size += com.google.protobuf.CodedOutputStream
240           .computeBytesSize(2, serializedFilter_);
241       }
242       size += getUnknownFields().getSerializedSize();
243       memoizedSerializedSize = size;
244       return size;
245     }
246 
247     private static final long serialVersionUID = 0L;
248     @java.lang.Override
writeReplace()249     protected java.lang.Object writeReplace()
250         throws java.io.ObjectStreamException {
251       return super.writeReplace();
252     }
253 
254     @java.lang.Override
equals(final java.lang.Object obj)255     public boolean equals(final java.lang.Object obj) {
256       if (obj == this) {
257        return true;
258       }
259       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)) {
260         return super.equals(obj);
261       }
262       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) obj;
263 
264       boolean result = true;
265       result = result && (hasName() == other.hasName());
266       if (hasName()) {
267         result = result && getName()
268             .equals(other.getName());
269       }
270       result = result && (hasSerializedFilter() == other.hasSerializedFilter());
271       if (hasSerializedFilter()) {
272         result = result && getSerializedFilter()
273             .equals(other.getSerializedFilter());
274       }
275       result = result &&
276           getUnknownFields().equals(other.getUnknownFields());
277       return result;
278     }
279 
280     private int memoizedHashCode = 0;
281     @java.lang.Override
hashCode()282     public int hashCode() {
283       if (memoizedHashCode != 0) {
284         return memoizedHashCode;
285       }
286       int hash = 41;
287       hash = (19 * hash) + getDescriptorForType().hashCode();
288       if (hasName()) {
289         hash = (37 * hash) + NAME_FIELD_NUMBER;
290         hash = (53 * hash) + getName().hashCode();
291       }
292       if (hasSerializedFilter()) {
293         hash = (37 * hash) + SERIALIZED_FILTER_FIELD_NUMBER;
294         hash = (53 * hash) + getSerializedFilter().hashCode();
295       }
296       hash = (29 * hash) + getUnknownFields().hashCode();
297       memoizedHashCode = hash;
298       return hash;
299     }
300 
parseFrom( com.google.protobuf.ByteString data)301     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
302         com.google.protobuf.ByteString data)
303         throws com.google.protobuf.InvalidProtocolBufferException {
304       return PARSER.parseFrom(data);
305     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)306     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
307         com.google.protobuf.ByteString data,
308         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
309         throws com.google.protobuf.InvalidProtocolBufferException {
310       return PARSER.parseFrom(data, extensionRegistry);
311     }
parseFrom(byte[] data)312     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(byte[] data)
313         throws com.google.protobuf.InvalidProtocolBufferException {
314       return PARSER.parseFrom(data);
315     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)316     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
317         byte[] data,
318         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
319         throws com.google.protobuf.InvalidProtocolBufferException {
320       return PARSER.parseFrom(data, extensionRegistry);
321     }
parseFrom(java.io.InputStream input)322     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(java.io.InputStream input)
323         throws java.io.IOException {
324       return PARSER.parseFrom(input);
325     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)326     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
327         java.io.InputStream input,
328         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
329         throws java.io.IOException {
330       return PARSER.parseFrom(input, extensionRegistry);
331     }
parseDelimitedFrom(java.io.InputStream input)332     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(java.io.InputStream input)
333         throws java.io.IOException {
334       return PARSER.parseDelimitedFrom(input);
335     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)336     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(
337         java.io.InputStream input,
338         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
339         throws java.io.IOException {
340       return PARSER.parseDelimitedFrom(input, extensionRegistry);
341     }
parseFrom( com.google.protobuf.CodedInputStream input)342     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
343         com.google.protobuf.CodedInputStream input)
344         throws java.io.IOException {
345       return PARSER.parseFrom(input);
346     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)347     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(
348         com.google.protobuf.CodedInputStream input,
349         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
350         throws java.io.IOException {
351       return PARSER.parseFrom(input, extensionRegistry);
352     }
353 
newBuilder()354     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()355     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype)356     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype) {
357       return newBuilder().mergeFrom(prototype);
358     }
toBuilder()359     public Builder toBuilder() { return newBuilder(this); }
360 
361     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)362     protected Builder newBuilderForType(
363         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
364       Builder builder = new Builder(parent);
365       return builder;
366     }
367     /**
368      * Protobuf type {@code Filter}
369      */
370     public static final class Builder extends
371         com.google.protobuf.GeneratedMessage.Builder<Builder>
372        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder {
373       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()374           getDescriptor() {
375         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor;
376       }
377 
378       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()379           internalGetFieldAccessorTable() {
380         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable
381             .ensureFieldAccessorsInitialized(
382                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class);
383       }
384 
385       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder()
Builder()386       private Builder() {
387         maybeForceBuilderInitialization();
388       }
389 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)390       private Builder(
391           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
392         super(parent);
393         maybeForceBuilderInitialization();
394       }
maybeForceBuilderInitialization()395       private void maybeForceBuilderInitialization() {
396         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
397         }
398       }
create()399       private static Builder create() {
400         return new Builder();
401       }
402 
clear()403       public Builder clear() {
404         super.clear();
405         name_ = "";
406         bitField0_ = (bitField0_ & ~0x00000001);
407         serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
408         bitField0_ = (bitField0_ & ~0x00000002);
409         return this;
410       }
411 
clone()412       public Builder clone() {
413         return create().mergeFrom(buildPartial());
414       }
415 
416       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()417           getDescriptorForType() {
418         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor;
419       }
420 
getDefaultInstanceForType()421       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getDefaultInstanceForType() {
422         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
423       }
424 
build()425       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter build() {
426         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = buildPartial();
427         if (!result.isInitialized()) {
428           throw newUninitializedMessageException(result);
429         }
430         return result;
431       }
432 
buildPartial()433       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter buildPartial() {
434         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter(this);
435         int from_bitField0_ = bitField0_;
436         int to_bitField0_ = 0;
437         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
438           to_bitField0_ |= 0x00000001;
439         }
440         result.name_ = name_;
441         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
442           to_bitField0_ |= 0x00000002;
443         }
444         result.serializedFilter_ = serializedFilter_;
445         result.bitField0_ = to_bitField0_;
446         onBuilt();
447         return result;
448       }
449 
mergeFrom(com.google.protobuf.Message other)450       public Builder mergeFrom(com.google.protobuf.Message other) {
451         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) {
452           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)other);
453         } else {
454           super.mergeFrom(other);
455           return this;
456         }
457       }
458 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other)459       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other) {
460         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) return this;
461         if (other.hasName()) {
462           bitField0_ |= 0x00000001;
463           name_ = other.name_;
464           onChanged();
465         }
466         if (other.hasSerializedFilter()) {
467           setSerializedFilter(other.getSerializedFilter());
468         }
469         this.mergeUnknownFields(other.getUnknownFields());
470         return this;
471       }
472 
isInitialized()473       public final boolean isInitialized() {
474         if (!hasName()) {
475 
476           return false;
477         }
478         return true;
479       }
480 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)481       public Builder mergeFrom(
482           com.google.protobuf.CodedInputStream input,
483           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
484           throws java.io.IOException {
485         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parsedMessage = null;
486         try {
487           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
488         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
489           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) e.getUnfinishedMessage();
490           throw e;
491         } finally {
492           if (parsedMessage != null) {
493             mergeFrom(parsedMessage);
494           }
495         }
496         return this;
497       }
498       private int bitField0_;
499 
500       // required string name = 1;
501       private java.lang.Object name_ = "";
502       /**
503        * <code>required string name = 1;</code>
504        */
hasName()505       public boolean hasName() {
506         return ((bitField0_ & 0x00000001) == 0x00000001);
507       }
508       /**
509        * <code>required string name = 1;</code>
510        */
getName()511       public java.lang.String getName() {
512         java.lang.Object ref = name_;
513         if (!(ref instanceof java.lang.String)) {
514           java.lang.String s = ((com.google.protobuf.ByteString) ref)
515               .toStringUtf8();
516           name_ = s;
517           return s;
518         } else {
519           return (java.lang.String) ref;
520         }
521       }
522       /**
523        * <code>required string name = 1;</code>
524        */
525       public com.google.protobuf.ByteString
getNameBytes()526           getNameBytes() {
527         java.lang.Object ref = name_;
528         if (ref instanceof String) {
529           com.google.protobuf.ByteString b =
530               com.google.protobuf.ByteString.copyFromUtf8(
531                   (java.lang.String) ref);
532           name_ = b;
533           return b;
534         } else {
535           return (com.google.protobuf.ByteString) ref;
536         }
537       }
538       /**
539        * <code>required string name = 1;</code>
540        */
setName( java.lang.String value)541       public Builder setName(
542           java.lang.String value) {
543         if (value == null) {
544     throw new NullPointerException();
545   }
546   bitField0_ |= 0x00000001;
547         name_ = value;
548         onChanged();
549         return this;
550       }
551       /**
552        * <code>required string name = 1;</code>
553        */
clearName()554       public Builder clearName() {
555         bitField0_ = (bitField0_ & ~0x00000001);
556         name_ = getDefaultInstance().getName();
557         onChanged();
558         return this;
559       }
560       /**
561        * <code>required string name = 1;</code>
562        */
setNameBytes( com.google.protobuf.ByteString value)563       public Builder setNameBytes(
564           com.google.protobuf.ByteString value) {
565         if (value == null) {
566     throw new NullPointerException();
567   }
568   bitField0_ |= 0x00000001;
569         name_ = value;
570         onChanged();
571         return this;
572       }
573 
574       // optional bytes serialized_filter = 2;
575       private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY;
576       /**
577        * <code>optional bytes serialized_filter = 2;</code>
578        */
hasSerializedFilter()579       public boolean hasSerializedFilter() {
580         return ((bitField0_ & 0x00000002) == 0x00000002);
581       }
582       /**
583        * <code>optional bytes serialized_filter = 2;</code>
584        */
getSerializedFilter()585       public com.google.protobuf.ByteString getSerializedFilter() {
586         return serializedFilter_;
587       }
588       /**
589        * <code>optional bytes serialized_filter = 2;</code>
590        */
setSerializedFilter(com.google.protobuf.ByteString value)591       public Builder setSerializedFilter(com.google.protobuf.ByteString value) {
592         if (value == null) {
593     throw new NullPointerException();
594   }
595   bitField0_ |= 0x00000002;
596         serializedFilter_ = value;
597         onChanged();
598         return this;
599       }
600       /**
601        * <code>optional bytes serialized_filter = 2;</code>
602        */
clearSerializedFilter()603       public Builder clearSerializedFilter() {
604         bitField0_ = (bitField0_ & ~0x00000002);
605         serializedFilter_ = getDefaultInstance().getSerializedFilter();
606         onChanged();
607         return this;
608       }
609 
610       // @@protoc_insertion_point(builder_scope:Filter)
611     }
612 
613     static {
614       defaultInstance = new Filter(true);
defaultInstance.initFields()615       defaultInstance.initFields();
616     }
617 
618     // @@protoc_insertion_point(class_scope:Filter)
619   }
620 
621   public interface ColumnCountGetFilterOrBuilder
622       extends com.google.protobuf.MessageOrBuilder {
623 
624     // required int32 limit = 1;
625     /**
626      * <code>required int32 limit = 1;</code>
627      */
hasLimit()628     boolean hasLimit();
629     /**
630      * <code>required int32 limit = 1;</code>
631      */
getLimit()632     int getLimit();
633   }
634   /**
635    * Protobuf type {@code ColumnCountGetFilter}
636    */
637   public static final class ColumnCountGetFilter extends
638       com.google.protobuf.GeneratedMessage
639       implements ColumnCountGetFilterOrBuilder {
640     // Use ColumnCountGetFilter.newBuilder() to construct.
ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)641     private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
642       super(builder);
643       this.unknownFields = builder.getUnknownFields();
644     }
ColumnCountGetFilter(boolean noInit)645     private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
646 
647     private static final ColumnCountGetFilter defaultInstance;
getDefaultInstance()648     public static ColumnCountGetFilter getDefaultInstance() {
649       return defaultInstance;
650     }
651 
getDefaultInstanceForType()652     public ColumnCountGetFilter getDefaultInstanceForType() {
653       return defaultInstance;
654     }
655 
656     private final com.google.protobuf.UnknownFieldSet unknownFields;
657     @java.lang.Override
658     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()659         getUnknownFields() {
660       return this.unknownFields;
661     }
ColumnCountGetFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)662     private ColumnCountGetFilter(
663         com.google.protobuf.CodedInputStream input,
664         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
665         throws com.google.protobuf.InvalidProtocolBufferException {
666       initFields();
667       int mutable_bitField0_ = 0;
668       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
669           com.google.protobuf.UnknownFieldSet.newBuilder();
670       try {
671         boolean done = false;
672         while (!done) {
673           int tag = input.readTag();
674           switch (tag) {
675             case 0:
676               done = true;
677               break;
678             default: {
679               if (!parseUnknownField(input, unknownFields,
680                                      extensionRegistry, tag)) {
681                 done = true;
682               }
683               break;
684             }
685             case 8: {
686               bitField0_ |= 0x00000001;
687               limit_ = input.readInt32();
688               break;
689             }
690           }
691         }
692       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
693         throw e.setUnfinishedMessage(this);
694       } catch (java.io.IOException e) {
695         throw new com.google.protobuf.InvalidProtocolBufferException(
696             e.getMessage()).setUnfinishedMessage(this);
697       } finally {
698         this.unknownFields = unknownFields.build();
699         makeExtensionsImmutable();
700       }
701     }
702     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()703         getDescriptor() {
704       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
705     }
706 
707     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()708         internalGetFieldAccessorTable() {
709       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
710           .ensureFieldAccessorsInitialized(
711               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
712     }
713 
714     public static com.google.protobuf.Parser<ColumnCountGetFilter> PARSER =
715         new com.google.protobuf.AbstractParser<ColumnCountGetFilter>() {
716       public ColumnCountGetFilter parsePartialFrom(
717           com.google.protobuf.CodedInputStream input,
718           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
719           throws com.google.protobuf.InvalidProtocolBufferException {
720         return new ColumnCountGetFilter(input, extensionRegistry);
721       }
722     };
723 
724     @java.lang.Override
getParserForType()725     public com.google.protobuf.Parser<ColumnCountGetFilter> getParserForType() {
726       return PARSER;
727     }
728 
729     private int bitField0_;
730     // required int32 limit = 1;
731     public static final int LIMIT_FIELD_NUMBER = 1;
732     private int limit_;
733     /**
734      * <code>required int32 limit = 1;</code>
735      */
hasLimit()736     public boolean hasLimit() {
737       return ((bitField0_ & 0x00000001) == 0x00000001);
738     }
739     /**
740      * <code>required int32 limit = 1;</code>
741      */
getLimit()742     public int getLimit() {
743       return limit_;
744     }
745 
initFields()746     private void initFields() {
747       limit_ = 0;
748     }
749     private byte memoizedIsInitialized = -1;
isInitialized()750     public final boolean isInitialized() {
751       byte isInitialized = memoizedIsInitialized;
752       if (isInitialized != -1) return isInitialized == 1;
753 
754       if (!hasLimit()) {
755         memoizedIsInitialized = 0;
756         return false;
757       }
758       memoizedIsInitialized = 1;
759       return true;
760     }
761 
writeTo(com.google.protobuf.CodedOutputStream output)762     public void writeTo(com.google.protobuf.CodedOutputStream output)
763                         throws java.io.IOException {
764       getSerializedSize();
765       if (((bitField0_ & 0x00000001) == 0x00000001)) {
766         output.writeInt32(1, limit_);
767       }
768       getUnknownFields().writeTo(output);
769     }
770 
771     private int memoizedSerializedSize = -1;
getSerializedSize()772     public int getSerializedSize() {
773       int size = memoizedSerializedSize;
774       if (size != -1) return size;
775 
776       size = 0;
777       if (((bitField0_ & 0x00000001) == 0x00000001)) {
778         size += com.google.protobuf.CodedOutputStream
779           .computeInt32Size(1, limit_);
780       }
781       size += getUnknownFields().getSerializedSize();
782       memoizedSerializedSize = size;
783       return size;
784     }
785 
786     private static final long serialVersionUID = 0L;
787     @java.lang.Override
writeReplace()788     protected java.lang.Object writeReplace()
789         throws java.io.ObjectStreamException {
790       return super.writeReplace();
791     }
792 
793     @java.lang.Override
equals(final java.lang.Object obj)794     public boolean equals(final java.lang.Object obj) {
795       if (obj == this) {
796        return true;
797       }
798       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)) {
799         return super.equals(obj);
800       }
801       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj;
802 
803       boolean result = true;
804       result = result && (hasLimit() == other.hasLimit());
805       if (hasLimit()) {
806         result = result && (getLimit()
807             == other.getLimit());
808       }
809       result = result &&
810           getUnknownFields().equals(other.getUnknownFields());
811       return result;
812     }
813 
814     private int memoizedHashCode = 0;
815     @java.lang.Override
hashCode()816     public int hashCode() {
817       if (memoizedHashCode != 0) {
818         return memoizedHashCode;
819       }
820       int hash = 41;
821       hash = (19 * hash) + getDescriptorForType().hashCode();
822       if (hasLimit()) {
823         hash = (37 * hash) + LIMIT_FIELD_NUMBER;
824         hash = (53 * hash) + getLimit();
825       }
826       hash = (29 * hash) + getUnknownFields().hashCode();
827       memoizedHashCode = hash;
828       return hash;
829     }
830 
parseFrom( com.google.protobuf.ByteString data)831     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
832         com.google.protobuf.ByteString data)
833         throws com.google.protobuf.InvalidProtocolBufferException {
834       return PARSER.parseFrom(data);
835     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)836     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
837         com.google.protobuf.ByteString data,
838         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
839         throws com.google.protobuf.InvalidProtocolBufferException {
840       return PARSER.parseFrom(data, extensionRegistry);
841     }
parseFrom(byte[] data)842     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data)
843         throws com.google.protobuf.InvalidProtocolBufferException {
844       return PARSER.parseFrom(data);
845     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)846     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
847         byte[] data,
848         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
849         throws com.google.protobuf.InvalidProtocolBufferException {
850       return PARSER.parseFrom(data, extensionRegistry);
851     }
parseFrom(java.io.InputStream input)852     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input)
853         throws java.io.IOException {
854       return PARSER.parseFrom(input);
855     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)856     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
857         java.io.InputStream input,
858         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
859         throws java.io.IOException {
860       return PARSER.parseFrom(input, extensionRegistry);
861     }
parseDelimitedFrom(java.io.InputStream input)862     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input)
863         throws java.io.IOException {
864       return PARSER.parseDelimitedFrom(input);
865     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)866     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(
867         java.io.InputStream input,
868         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
869         throws java.io.IOException {
870       return PARSER.parseDelimitedFrom(input, extensionRegistry);
871     }
parseFrom( com.google.protobuf.CodedInputStream input)872     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
873         com.google.protobuf.CodedInputStream input)
874         throws java.io.IOException {
875       return PARSER.parseFrom(input);
876     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)877     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
878         com.google.protobuf.CodedInputStream input,
879         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
880         throws java.io.IOException {
881       return PARSER.parseFrom(input, extensionRegistry);
882     }
883 
newBuilder()884     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()885     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype)886     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) {
887       return newBuilder().mergeFrom(prototype);
888     }
toBuilder()889     public Builder toBuilder() { return newBuilder(this); }
890 
891     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)892     protected Builder newBuilderForType(
893         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
894       Builder builder = new Builder(parent);
895       return builder;
896     }
897     /**
898      * Protobuf type {@code ColumnCountGetFilter}
899      */
900     public static final class Builder extends
901         com.google.protobuf.GeneratedMessage.Builder<Builder>
902        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder {
903       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()904           getDescriptor() {
905         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
906       }
907 
908       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()909           internalGetFieldAccessorTable() {
910         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
911             .ensureFieldAccessorsInitialized(
912                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
913       }
914 
915       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder()
Builder()916       private Builder() {
917         maybeForceBuilderInitialization();
918       }
919 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)920       private Builder(
921           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
922         super(parent);
923         maybeForceBuilderInitialization();
924       }
maybeForceBuilderInitialization()925       private void maybeForceBuilderInitialization() {
926         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
927         }
928       }
create()929       private static Builder create() {
930         return new Builder();
931       }
932 
clear()933       public Builder clear() {
934         super.clear();
935         limit_ = 0;
936         bitField0_ = (bitField0_ & ~0x00000001);
937         return this;
938       }
939 
clone()940       public Builder clone() {
941         return create().mergeFrom(buildPartial());
942       }
943 
944       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()945           getDescriptorForType() {
946         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
947       }
948 
getDefaultInstanceForType()949       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() {
950         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance();
951       }
952 
build()953       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() {
954         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
955         if (!result.isInitialized()) {
956           throw newUninitializedMessageException(result);
957         }
958         return result;
959       }
960 
buildPartial()961       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() {
962         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this);
963         int from_bitField0_ = bitField0_;
964         int to_bitField0_ = 0;
965         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
966           to_bitField0_ |= 0x00000001;
967         }
968         result.limit_ = limit_;
969         result.bitField0_ = to_bitField0_;
970         onBuilt();
971         return result;
972       }
973 
mergeFrom(com.google.protobuf.Message other)974       public Builder mergeFrom(com.google.protobuf.Message other) {
975         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) {
976           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other);
977         } else {
978           super.mergeFrom(other);
979           return this;
980         }
981       }
982 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other)983       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) {
984         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this;
985         if (other.hasLimit()) {
986           setLimit(other.getLimit());
987         }
988         this.mergeUnknownFields(other.getUnknownFields());
989         return this;
990       }
991 
isInitialized()992       public final boolean isInitialized() {
993         if (!hasLimit()) {
994 
995           return false;
996         }
997         return true;
998       }
999 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1000       public Builder mergeFrom(
1001           com.google.protobuf.CodedInputStream input,
1002           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1003           throws java.io.IOException {
1004         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parsedMessage = null;
1005         try {
1006           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1007         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1008           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage();
1009           throw e;
1010         } finally {
1011           if (parsedMessage != null) {
1012             mergeFrom(parsedMessage);
1013           }
1014         }
1015         return this;
1016       }
1017       private int bitField0_;
1018 
1019       // required int32 limit = 1;
1020       private int limit_ ;
1021       /**
1022        * <code>required int32 limit = 1;</code>
1023        */
hasLimit()1024       public boolean hasLimit() {
1025         return ((bitField0_ & 0x00000001) == 0x00000001);
1026       }
1027       /**
1028        * <code>required int32 limit = 1;</code>
1029        */
getLimit()1030       public int getLimit() {
1031         return limit_;
1032       }
1033       /**
1034        * <code>required int32 limit = 1;</code>
1035        */
setLimit(int value)1036       public Builder setLimit(int value) {
1037         bitField0_ |= 0x00000001;
1038         limit_ = value;
1039         onChanged();
1040         return this;
1041       }
1042       /**
1043        * <code>required int32 limit = 1;</code>
1044        */
clearLimit()1045       public Builder clearLimit() {
1046         bitField0_ = (bitField0_ & ~0x00000001);
1047         limit_ = 0;
1048         onChanged();
1049         return this;
1050       }
1051 
1052       // @@protoc_insertion_point(builder_scope:ColumnCountGetFilter)
1053     }
1054 
1055     static {
1056       defaultInstance = new ColumnCountGetFilter(true);
defaultInstance.initFields()1057       defaultInstance.initFields();
1058     }
1059 
1060     // @@protoc_insertion_point(class_scope:ColumnCountGetFilter)
1061   }
1062 
1063   public interface ColumnPaginationFilterOrBuilder
1064       extends com.google.protobuf.MessageOrBuilder {
1065 
1066     // required int32 limit = 1;
1067     /**
1068      * <code>required int32 limit = 1;</code>
1069      */
hasLimit()1070     boolean hasLimit();
1071     /**
1072      * <code>required int32 limit = 1;</code>
1073      */
getLimit()1074     int getLimit();
1075 
1076     // optional int32 offset = 2;
1077     /**
1078      * <code>optional int32 offset = 2;</code>
1079      */
hasOffset()1080     boolean hasOffset();
1081     /**
1082      * <code>optional int32 offset = 2;</code>
1083      */
getOffset()1084     int getOffset();
1085 
1086     // optional bytes column_offset = 3;
1087     /**
1088      * <code>optional bytes column_offset = 3;</code>
1089      */
hasColumnOffset()1090     boolean hasColumnOffset();
1091     /**
1092      * <code>optional bytes column_offset = 3;</code>
1093      */
getColumnOffset()1094     com.google.protobuf.ByteString getColumnOffset();
1095   }
1096   /**
1097    * Protobuf type {@code ColumnPaginationFilter}
1098    */
1099   public static final class ColumnPaginationFilter extends
1100       com.google.protobuf.GeneratedMessage
1101       implements ColumnPaginationFilterOrBuilder {
1102     // Use ColumnPaginationFilter.newBuilder() to construct.
ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)1103     private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1104       super(builder);
1105       this.unknownFields = builder.getUnknownFields();
1106     }
ColumnPaginationFilter(boolean noInit)1107     private ColumnPaginationFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1108 
1109     private static final ColumnPaginationFilter defaultInstance;
getDefaultInstance()1110     public static ColumnPaginationFilter getDefaultInstance() {
1111       return defaultInstance;
1112     }
1113 
getDefaultInstanceForType()1114     public ColumnPaginationFilter getDefaultInstanceForType() {
1115       return defaultInstance;
1116     }
1117 
1118     private final com.google.protobuf.UnknownFieldSet unknownFields;
1119     @java.lang.Override
1120     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()1121         getUnknownFields() {
1122       return this.unknownFields;
1123     }
ColumnPaginationFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1124     private ColumnPaginationFilter(
1125         com.google.protobuf.CodedInputStream input,
1126         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1127         throws com.google.protobuf.InvalidProtocolBufferException {
1128       initFields();
1129       int mutable_bitField0_ = 0;
1130       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1131           com.google.protobuf.UnknownFieldSet.newBuilder();
1132       try {
1133         boolean done = false;
1134         while (!done) {
1135           int tag = input.readTag();
1136           switch (tag) {
1137             case 0:
1138               done = true;
1139               break;
1140             default: {
1141               if (!parseUnknownField(input, unknownFields,
1142                                      extensionRegistry, tag)) {
1143                 done = true;
1144               }
1145               break;
1146             }
1147             case 8: {
1148               bitField0_ |= 0x00000001;
1149               limit_ = input.readInt32();
1150               break;
1151             }
1152             case 16: {
1153               bitField0_ |= 0x00000002;
1154               offset_ = input.readInt32();
1155               break;
1156             }
1157             case 26: {
1158               bitField0_ |= 0x00000004;
1159               columnOffset_ = input.readBytes();
1160               break;
1161             }
1162           }
1163         }
1164       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1165         throw e.setUnfinishedMessage(this);
1166       } catch (java.io.IOException e) {
1167         throw new com.google.protobuf.InvalidProtocolBufferException(
1168             e.getMessage()).setUnfinishedMessage(this);
1169       } finally {
1170         this.unknownFields = unknownFields.build();
1171         makeExtensionsImmutable();
1172       }
1173     }
1174     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()1175         getDescriptor() {
1176       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
1177     }
1178 
1179     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()1180         internalGetFieldAccessorTable() {
1181       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
1182           .ensureFieldAccessorsInitialized(
1183               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
1184     }
1185 
1186     public static com.google.protobuf.Parser<ColumnPaginationFilter> PARSER =
1187         new com.google.protobuf.AbstractParser<ColumnPaginationFilter>() {
1188       public ColumnPaginationFilter parsePartialFrom(
1189           com.google.protobuf.CodedInputStream input,
1190           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1191           throws com.google.protobuf.InvalidProtocolBufferException {
1192         return new ColumnPaginationFilter(input, extensionRegistry);
1193       }
1194     };
1195 
1196     @java.lang.Override
getParserForType()1197     public com.google.protobuf.Parser<ColumnPaginationFilter> getParserForType() {
1198       return PARSER;
1199     }
1200 
1201     private int bitField0_;
1202     // required int32 limit = 1;
1203     public static final int LIMIT_FIELD_NUMBER = 1;
1204     private int limit_;
1205     /**
1206      * <code>required int32 limit = 1;</code>
1207      */
hasLimit()1208     public boolean hasLimit() {
1209       return ((bitField0_ & 0x00000001) == 0x00000001);
1210     }
1211     /**
1212      * <code>required int32 limit = 1;</code>
1213      */
getLimit()1214     public int getLimit() {
1215       return limit_;
1216     }
1217 
1218     // optional int32 offset = 2;
1219     public static final int OFFSET_FIELD_NUMBER = 2;
1220     private int offset_;
1221     /**
1222      * <code>optional int32 offset = 2;</code>
1223      */
hasOffset()1224     public boolean hasOffset() {
1225       return ((bitField0_ & 0x00000002) == 0x00000002);
1226     }
1227     /**
1228      * <code>optional int32 offset = 2;</code>
1229      */
getOffset()1230     public int getOffset() {
1231       return offset_;
1232     }
1233 
1234     // optional bytes column_offset = 3;
1235     public static final int COLUMN_OFFSET_FIELD_NUMBER = 3;
1236     private com.google.protobuf.ByteString columnOffset_;
1237     /**
1238      * <code>optional bytes column_offset = 3;</code>
1239      */
hasColumnOffset()1240     public boolean hasColumnOffset() {
1241       return ((bitField0_ & 0x00000004) == 0x00000004);
1242     }
1243     /**
1244      * <code>optional bytes column_offset = 3;</code>
1245      */
getColumnOffset()1246     public com.google.protobuf.ByteString getColumnOffset() {
1247       return columnOffset_;
1248     }
1249 
initFields()1250     private void initFields() {
1251       limit_ = 0;
1252       offset_ = 0;
1253       columnOffset_ = com.google.protobuf.ByteString.EMPTY;
1254     }
1255     private byte memoizedIsInitialized = -1;
isInitialized()1256     public final boolean isInitialized() {
1257       byte isInitialized = memoizedIsInitialized;
1258       if (isInitialized != -1) return isInitialized == 1;
1259 
1260       if (!hasLimit()) {
1261         memoizedIsInitialized = 0;
1262         return false;
1263       }
1264       memoizedIsInitialized = 1;
1265       return true;
1266     }
1267 
writeTo(com.google.protobuf.CodedOutputStream output)1268     public void writeTo(com.google.protobuf.CodedOutputStream output)
1269                         throws java.io.IOException {
1270       getSerializedSize();
1271       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1272         output.writeInt32(1, limit_);
1273       }
1274       if (((bitField0_ & 0x00000002) == 0x00000002)) {
1275         output.writeInt32(2, offset_);
1276       }
1277       if (((bitField0_ & 0x00000004) == 0x00000004)) {
1278         output.writeBytes(3, columnOffset_);
1279       }
1280       getUnknownFields().writeTo(output);
1281     }
1282 
1283     private int memoizedSerializedSize = -1;
getSerializedSize()1284     public int getSerializedSize() {
1285       int size = memoizedSerializedSize;
1286       if (size != -1) return size;
1287 
1288       size = 0;
1289       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1290         size += com.google.protobuf.CodedOutputStream
1291           .computeInt32Size(1, limit_);
1292       }
1293       if (((bitField0_ & 0x00000002) == 0x00000002)) {
1294         size += com.google.protobuf.CodedOutputStream
1295           .computeInt32Size(2, offset_);
1296       }
1297       if (((bitField0_ & 0x00000004) == 0x00000004)) {
1298         size += com.google.protobuf.CodedOutputStream
1299           .computeBytesSize(3, columnOffset_);
1300       }
1301       size += getUnknownFields().getSerializedSize();
1302       memoizedSerializedSize = size;
1303       return size;
1304     }
1305 
1306     private static final long serialVersionUID = 0L;
1307     @java.lang.Override
writeReplace()1308     protected java.lang.Object writeReplace()
1309         throws java.io.ObjectStreamException {
1310       return super.writeReplace();
1311     }
1312 
1313     @java.lang.Override
1314     public boolean equals(final java.lang.Object obj) {
1315       if (obj == this) {
1316         return true;
1317       }
1318       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)) {
1319         return super.equals(obj);
1320       }
1321       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj;
1322 
1323       boolean result = true;
1324       result = result && (hasLimit() == other.hasLimit());
1325       if (hasLimit()) {
1326         result = result && (getLimit()
1327             == other.getLimit());
1328       }
1329       result = result && (hasOffset() == other.hasOffset());
1330       if (hasOffset()) {
1331         result = result && (getOffset()
1332             == other.getOffset());
1333       }
1334       result = result && (hasColumnOffset() == other.hasColumnOffset());
1335       if (hasColumnOffset()) {
1336         result = result && getColumnOffset()
1337             .equals(other.getColumnOffset());
1338       }
1339       result = result &&
1340           getUnknownFields().equals(other.getUnknownFields());
1341       return result;
1342     }
1343 
1344     private int memoizedHashCode = 0;
1345     @java.lang.Override
1346     public int hashCode() {
1347       if (memoizedHashCode != 0) {
1348         return memoizedHashCode;
1349       }
1350       int hash = 41;
1351       hash = (19 * hash) + getDescriptorForType().hashCode();
1352       if (hasLimit()) {
1353         hash = (37 * hash) + LIMIT_FIELD_NUMBER;
1354         hash = (53 * hash) + getLimit();
1355       }
1356       if (hasOffset()) {
1357         hash = (37 * hash) + OFFSET_FIELD_NUMBER;
1358         hash = (53 * hash) + getOffset();
1359       }
1360       if (hasColumnOffset()) {
1361         hash = (37 * hash) + COLUMN_OFFSET_FIELD_NUMBER;
1362         hash = (53 * hash) + getColumnOffset().hashCode();
1363       }
1364       hash = (29 * hash) + getUnknownFields().hashCode();
1365       memoizedHashCode = hash;
1366       return hash;
1367     }
1368 
1369     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
1370         com.google.protobuf.ByteString data)
1371         throws com.google.protobuf.InvalidProtocolBufferException {
1372       return PARSER.parseFrom(data);
1373     }
1374     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
1375         com.google.protobuf.ByteString data,
1376         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1377         throws com.google.protobuf.InvalidProtocolBufferException {
1378       return PARSER.parseFrom(data, extensionRegistry);
1379     }
1380     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data)
1381         throws com.google.protobuf.InvalidProtocolBufferException {
1382       return PARSER.parseFrom(data);
1383     }
1384     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
1385         byte[] data,
1386         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1387         throws com.google.protobuf.InvalidProtocolBufferException {
1388       return PARSER.parseFrom(data, extensionRegistry);
1389     }
1390     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input)
1391         throws java.io.IOException {
1392       return PARSER.parseFrom(input);
1393     }
1394     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
1395         java.io.InputStream input,
1396         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1397         throws java.io.IOException {
1398       return PARSER.parseFrom(input, extensionRegistry);
1399     }
1400     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input)
1401         throws java.io.IOException {
1402       return PARSER.parseDelimitedFrom(input);
1403     }
1404     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(
1405         java.io.InputStream input,
1406         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1407         throws java.io.IOException {
1408       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1409     }
1410     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
1411         com.google.protobuf.CodedInputStream input)
1412         throws java.io.IOException {
1413       return PARSER.parseFrom(input);
1414     }
1415     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
1416         com.google.protobuf.CodedInputStream input,
1417         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1418         throws java.io.IOException {
1419       return PARSER.parseFrom(input, extensionRegistry);
1420     }
1421 
1422     public static Builder newBuilder() { return Builder.create(); }
1423     public Builder newBuilderForType() { return newBuilder(); }
1424     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) {
1425       return newBuilder().mergeFrom(prototype);
1426     }
1427     public Builder toBuilder() { return newBuilder(this); }
1428 
1429     @java.lang.Override
1430     protected Builder newBuilderForType(
1431         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1432       Builder builder = new Builder(parent);
1433       return builder;
1434     }
1435     /**
1436      * Protobuf type {@code ColumnPaginationFilter}
1437      */
1438     public static final class Builder extends
1439         com.google.protobuf.GeneratedMessage.Builder<Builder>
1440        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder {
1441       public static final com.google.protobuf.Descriptors.Descriptor
1442           getDescriptor() {
1443         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
1444       }
1445 
1446       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1447           internalGetFieldAccessorTable() {
1448         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
1449             .ensureFieldAccessorsInitialized(
1450                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
1451       }
1452 
1453       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder()
1454       private Builder() {
1455         maybeForceBuilderInitialization();
1456       }
1457 
1458       private Builder(
1459           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1460         super(parent);
1461         maybeForceBuilderInitialization();
1462       }
1463       private void maybeForceBuilderInitialization() {
1464         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1465         }
1466       }
1467       private static Builder create() {
1468         return new Builder();
1469       }
1470 
1471       public Builder clear() {
1472         super.clear();
1473         limit_ = 0;
1474         bitField0_ = (bitField0_ & ~0x00000001);
1475         offset_ = 0;
1476         bitField0_ = (bitField0_ & ~0x00000002);
1477         columnOffset_ = com.google.protobuf.ByteString.EMPTY;
1478         bitField0_ = (bitField0_ & ~0x00000004);
1479         return this;
1480       }
1481 
1482       public Builder clone() {
1483         return create().mergeFrom(buildPartial());
1484       }
1485 
1486       public com.google.protobuf.Descriptors.Descriptor
1487           getDescriptorForType() {
1488         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
1489       }
1490 
1491       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() {
1492         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance();
1493       }
1494 
1495       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() {
1496         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
1497         if (!result.isInitialized()) {
1498           throw newUninitializedMessageException(result);
1499         }
1500         return result;
1501       }
1502 
1503       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() {
1504         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this);
1505         int from_bitField0_ = bitField0_;
1506         int to_bitField0_ = 0;
1507         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1508           to_bitField0_ |= 0x00000001;
1509         }
1510         result.limit_ = limit_;
1511         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
1512           to_bitField0_ |= 0x00000002;
1513         }
1514         result.offset_ = offset_;
1515         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
1516           to_bitField0_ |= 0x00000004;
1517         }
1518         result.columnOffset_ = columnOffset_;
1519         result.bitField0_ = to_bitField0_;
1520         onBuilt();
1521         return result;
1522       }
1523 
1524       public Builder mergeFrom(com.google.protobuf.Message other) {
1525         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) {
1526           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other);
1527         } else {
1528           super.mergeFrom(other);
1529           return this;
1530         }
1531       }
1532 
1533       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) {
1534         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this;
1535         if (other.hasLimit()) {
1536           setLimit(other.getLimit());
1537         }
1538         if (other.hasOffset()) {
1539           setOffset(other.getOffset());
1540         }
1541         if (other.hasColumnOffset()) {
1542           setColumnOffset(other.getColumnOffset());
1543         }
1544         this.mergeUnknownFields(other.getUnknownFields());
1545         return this;
1546       }
1547 
1548       public final boolean isInitialized() {
1549         if (!hasLimit()) {
1550 
1551           return false;
1552         }
1553         return true;
1554       }
1555 
1556       public Builder mergeFrom(
1557           com.google.protobuf.CodedInputStream input,
1558           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1559           throws java.io.IOException {
1560         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parsedMessage = null;
1561         try {
1562           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1563         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1564           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage();
1565           throw e;
1566         } finally {
1567           if (parsedMessage != null) {
1568             mergeFrom(parsedMessage);
1569           }
1570         }
1571         return this;
1572       }
1573       private int bitField0_;
1574 
1575       // required int32 limit = 1;
1576       private int limit_ ;
1577       /**
1578        * <code>required int32 limit = 1;</code>
1579        */
1580       public boolean hasLimit() {
1581         return ((bitField0_ & 0x00000001) == 0x00000001);
1582       }
1583       /**
1584        * <code>required int32 limit = 1;</code>
1585        */
1586       public int getLimit() {
1587         return limit_;
1588       }
1589       /**
1590        * <code>required int32 limit = 1;</code>
1591        */
1592       public Builder setLimit(int value) {
1593         bitField0_ |= 0x00000001;
1594         limit_ = value;
1595         onChanged();
1596         return this;
1597       }
1598       /**
1599        * <code>required int32 limit = 1;</code>
1600        */
1601       public Builder clearLimit() {
1602         bitField0_ = (bitField0_ & ~0x00000001);
1603         limit_ = 0;
1604         onChanged();
1605         return this;
1606       }
1607 
1608       // optional int32 offset = 2;
1609       private int offset_ ;
1610       /**
1611        * <code>optional int32 offset = 2;</code>
1612        */
1613       public boolean hasOffset() {
1614         return ((bitField0_ & 0x00000002) == 0x00000002);
1615       }
1616       /**
1617        * <code>optional int32 offset = 2;</code>
1618        */
1619       public int getOffset() {
1620         return offset_;
1621       }
1622       /**
1623        * <code>optional int32 offset = 2;</code>
1624        */
1625       public Builder setOffset(int value) {
1626         bitField0_ |= 0x00000002;
1627         offset_ = value;
1628         onChanged();
1629         return this;
1630       }
1631       /**
1632        * <code>optional int32 offset = 2;</code>
1633        */
1634       public Builder clearOffset() {
1635         bitField0_ = (bitField0_ & ~0x00000002);
1636         offset_ = 0;
1637         onChanged();
1638         return this;
1639       }
1640 
1641       // optional bytes column_offset = 3;
1642       private com.google.protobuf.ByteString columnOffset_ = com.google.protobuf.ByteString.EMPTY;
1643       /**
1644        * <code>optional bytes column_offset = 3;</code>
1645        */
1646       public boolean hasColumnOffset() {
1647         return ((bitField0_ & 0x00000004) == 0x00000004);
1648       }
1649       /**
1650        * <code>optional bytes column_offset = 3;</code>
1651        */
1652       public com.google.protobuf.ByteString getColumnOffset() {
1653         return columnOffset_;
1654       }
1655       /**
1656        * <code>optional bytes column_offset = 3;</code>
1657        */
1658       public Builder setColumnOffset(com.google.protobuf.ByteString value) {
1659         if (value == null) {
1660           throw new NullPointerException();
1661         }
1662         bitField0_ |= 0x00000004;
1663         columnOffset_ = value;
1664         onChanged();
1665         return this;
1666       }
1667       /**
1668        * <code>optional bytes column_offset = 3;</code>
1669        */
1670       public Builder clearColumnOffset() {
1671         bitField0_ = (bitField0_ & ~0x00000004);
1672         columnOffset_ = getDefaultInstance().getColumnOffset();
1673         onChanged();
1674         return this;
1675       }
1676 
1677       // @@protoc_insertion_point(builder_scope:ColumnPaginationFilter)
1678     }
1679 
1680     static {
1681       defaultInstance = new ColumnPaginationFilter(true);
1682       defaultInstance.initFields();
1683     }
1684 
1685     // @@protoc_insertion_point(class_scope:ColumnPaginationFilter)
1686   }
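  // Illustrative usage sketch for the message above; the limit/offset values are
  // arbitrary examples. A ColumnPaginationFilter is built through its generated
  // Builder and can be round-tripped through its wire form:
  //
  //   FilterProtos.ColumnPaginationFilter paginationFilter =
  //       FilterProtos.ColumnPaginationFilter.newBuilder()
  //           .setLimit(10)   // required int32 limit = 1
  //           .setOffset(5)   // optional int32 offset = 2
  //           .build();
  //   FilterProtos.ColumnPaginationFilter reparsed =
  //       FilterProtos.ColumnPaginationFilter.parseFrom(paginationFilter.toByteArray());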
1687 
1688   public interface ColumnPrefixFilterOrBuilder
1689       extends com.google.protobuf.MessageOrBuilder {
1690 
1691     // required bytes prefix = 1;
1692     /**
1693      * <code>required bytes prefix = 1;</code>
1694      */
1695     boolean hasPrefix();
1696     /**
1697      * <code>required bytes prefix = 1;</code>
1698      */
1699     com.google.protobuf.ByteString getPrefix();
1700   }
1701   /**
1702    * Protobuf type {@code ColumnPrefixFilter}
1703    */
1704   public static final class ColumnPrefixFilter extends
1705       com.google.protobuf.GeneratedMessage
1706       implements ColumnPrefixFilterOrBuilder {
1707     // Use ColumnPrefixFilter.newBuilder() to construct.
1708     private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1709       super(builder);
1710       this.unknownFields = builder.getUnknownFields();
1711     }
1712     private ColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1713 
1714     private static final ColumnPrefixFilter defaultInstance;
1715     public static ColumnPrefixFilter getDefaultInstance() {
1716       return defaultInstance;
1717     }
1718 
1719     public ColumnPrefixFilter getDefaultInstanceForType() {
1720       return defaultInstance;
1721     }
1722 
1723     private final com.google.protobuf.UnknownFieldSet unknownFields;
1724     @java.lang.Override
1725     public final com.google.protobuf.UnknownFieldSet
1726         getUnknownFields() {
1727       return this.unknownFields;
1728     }
1729     private ColumnPrefixFilter(
1730         com.google.protobuf.CodedInputStream input,
1731         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1732         throws com.google.protobuf.InvalidProtocolBufferException {
1733       initFields();
1734       int mutable_bitField0_ = 0;
1735       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1736           com.google.protobuf.UnknownFieldSet.newBuilder();
1737       try {
1738         boolean done = false;
1739         while (!done) {
1740           int tag = input.readTag();
1741           switch (tag) {
1742             case 0:
1743               done = true;
1744               break;
1745             default: {
1746               if (!parseUnknownField(input, unknownFields,
1747                                      extensionRegistry, tag)) {
1748                 done = true;
1749               }
1750               break;
1751             }
1752             case 10: {
1753               bitField0_ |= 0x00000001;
1754               prefix_ = input.readBytes();
1755               break;
1756             }
1757           }
1758         }
1759       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1760         throw e.setUnfinishedMessage(this);
1761       } catch (java.io.IOException e) {
1762         throw new com.google.protobuf.InvalidProtocolBufferException(
1763             e.getMessage()).setUnfinishedMessage(this);
1764       } finally {
1765         this.unknownFields = unknownFields.build();
1766         makeExtensionsImmutable();
1767       }
1768     }
1769     public static final com.google.protobuf.Descriptors.Descriptor
1770         getDescriptor() {
1771       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
1772     }
1773 
1774     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1775         internalGetFieldAccessorTable() {
1776       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
1777           .ensureFieldAccessorsInitialized(
1778               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
1779     }
1780 
1781     public static com.google.protobuf.Parser<ColumnPrefixFilter> PARSER =
1782         new com.google.protobuf.AbstractParser<ColumnPrefixFilter>() {
1783       public ColumnPrefixFilter parsePartialFrom(
1784           com.google.protobuf.CodedInputStream input,
1785           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1786           throws com.google.protobuf.InvalidProtocolBufferException {
1787         return new ColumnPrefixFilter(input, extensionRegistry);
1788       }
1789     };
1790 
1791     @java.lang.Override
1792     public com.google.protobuf.Parser<ColumnPrefixFilter> getParserForType() {
1793       return PARSER;
1794     }
1795 
1796     private int bitField0_;
1797     // required bytes prefix = 1;
1798     public static final int PREFIX_FIELD_NUMBER = 1;
1799     private com.google.protobuf.ByteString prefix_;
1800     /**
1801      * <code>required bytes prefix = 1;</code>
1802      */
1803     public boolean hasPrefix() {
1804       return ((bitField0_ & 0x00000001) == 0x00000001);
1805     }
1806     /**
1807      * <code>required bytes prefix = 1;</code>
1808      */
1809     public com.google.protobuf.ByteString getPrefix() {
1810       return prefix_;
1811     }
1812 
1813     private void initFields() {
1814       prefix_ = com.google.protobuf.ByteString.EMPTY;
1815     }
1816     private byte memoizedIsInitialized = -1;
1817     public final boolean isInitialized() {
1818       byte isInitialized = memoizedIsInitialized;
1819       if (isInitialized != -1) return isInitialized == 1;
1820 
1821       if (!hasPrefix()) {
1822         memoizedIsInitialized = 0;
1823         return false;
1824       }
1825       memoizedIsInitialized = 1;
1826       return true;
1827     }
1828 
1829     public void writeTo(com.google.protobuf.CodedOutputStream output)
1830                         throws java.io.IOException {
1831       getSerializedSize();
1832       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1833         output.writeBytes(1, prefix_);
1834       }
1835       getUnknownFields().writeTo(output);
1836     }
1837 
1838     private int memoizedSerializedSize = -1;
1839     public int getSerializedSize() {
1840       int size = memoizedSerializedSize;
1841       if (size != -1) return size;
1842 
1843       size = 0;
1844       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1845         size += com.google.protobuf.CodedOutputStream
1846           .computeBytesSize(1, prefix_);
1847       }
1848       size += getUnknownFields().getSerializedSize();
1849       memoizedSerializedSize = size;
1850       return size;
1851     }
1852 
1853     private static final long serialVersionUID = 0L;
1854     @java.lang.Override
1855     protected java.lang.Object writeReplace()
1856         throws java.io.ObjectStreamException {
1857       return super.writeReplace();
1858     }
1859 
1860     @java.lang.Override
1861     public boolean equals(final java.lang.Object obj) {
1862       if (obj == this) {
1863         return true;
1864       }
1865       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)) {
1866         return super.equals(obj);
1867       }
1868       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj;
1869 
1870       boolean result = true;
1871       result = result && (hasPrefix() == other.hasPrefix());
1872       if (hasPrefix()) {
1873         result = result && getPrefix()
1874             .equals(other.getPrefix());
1875       }
1876       result = result &&
1877           getUnknownFields().equals(other.getUnknownFields());
1878       return result;
1879     }
1880 
1881     private int memoizedHashCode = 0;
1882     @java.lang.Override
1883     public int hashCode() {
1884       if (memoizedHashCode != 0) {
1885         return memoizedHashCode;
1886       }
1887       int hash = 41;
1888       hash = (19 * hash) + getDescriptorForType().hashCode();
1889       if (hasPrefix()) {
1890         hash = (37 * hash) + PREFIX_FIELD_NUMBER;
1891         hash = (53 * hash) + getPrefix().hashCode();
1892       }
1893       hash = (29 * hash) + getUnknownFields().hashCode();
1894       memoizedHashCode = hash;
1895       return hash;
1896     }
1897 
1898     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
1899         com.google.protobuf.ByteString data)
1900         throws com.google.protobuf.InvalidProtocolBufferException {
1901       return PARSER.parseFrom(data);
1902     }
1903     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
1904         com.google.protobuf.ByteString data,
1905         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1906         throws com.google.protobuf.InvalidProtocolBufferException {
1907       return PARSER.parseFrom(data, extensionRegistry);
1908     }
1909     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data)
1910         throws com.google.protobuf.InvalidProtocolBufferException {
1911       return PARSER.parseFrom(data);
1912     }
1913     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
1914         byte[] data,
1915         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1916         throws com.google.protobuf.InvalidProtocolBufferException {
1917       return PARSER.parseFrom(data, extensionRegistry);
1918     }
1919     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input)
1920         throws java.io.IOException {
1921       return PARSER.parseFrom(input);
1922     }
1923     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
1924         java.io.InputStream input,
1925         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1926         throws java.io.IOException {
1927       return PARSER.parseFrom(input, extensionRegistry);
1928     }
1929     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
1930         throws java.io.IOException {
1931       return PARSER.parseDelimitedFrom(input);
1932     }
1933     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(
1934         java.io.InputStream input,
1935         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1936         throws java.io.IOException {
1937       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1938     }
1939     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
1940         com.google.protobuf.CodedInputStream input)
1941         throws java.io.IOException {
1942       return PARSER.parseFrom(input);
1943     }
1944     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
1945         com.google.protobuf.CodedInputStream input,
1946         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1947         throws java.io.IOException {
1948       return PARSER.parseFrom(input, extensionRegistry);
1949     }
1950 
1951     public static Builder newBuilder() { return Builder.create(); }
1952     public Builder newBuilderForType() { return newBuilder(); }
1953     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) {
1954       return newBuilder().mergeFrom(prototype);
1955     }
1956     public Builder toBuilder() { return newBuilder(this); }
1957 
1958     @java.lang.Override
1959     protected Builder newBuilderForType(
1960         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1961       Builder builder = new Builder(parent);
1962       return builder;
1963     }
1964     /**
1965      * Protobuf type {@code ColumnPrefixFilter}
1966      */
1967     public static final class Builder extends
1968         com.google.protobuf.GeneratedMessage.Builder<Builder>
1969        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder {
1970       public static final com.google.protobuf.Descriptors.Descriptor
1971           getDescriptor() {
1972         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
1973       }
1974 
1975       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1976           internalGetFieldAccessorTable() {
1977         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
1978             .ensureFieldAccessorsInitialized(
1979                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
1980       }
1981 
1982       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder()
1983       private Builder() {
1984         maybeForceBuilderInitialization();
1985       }
1986 
1987       private Builder(
1988           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1989         super(parent);
1990         maybeForceBuilderInitialization();
1991       }
1992       private void maybeForceBuilderInitialization() {
1993         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1994         }
1995       }
1996       private static Builder create() {
1997         return new Builder();
1998       }
1999 
2000       public Builder clear() {
2001         super.clear();
2002         prefix_ = com.google.protobuf.ByteString.EMPTY;
2003         bitField0_ = (bitField0_ & ~0x00000001);
2004         return this;
2005       }
2006 
2007       public Builder clone() {
2008         return create().mergeFrom(buildPartial());
2009       }
2010 
2011       public com.google.protobuf.Descriptors.Descriptor
2012           getDescriptorForType() {
2013         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
2014       }
2015 
2016       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() {
2017         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance();
2018       }
2019 
2020       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() {
2021         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
2022         if (!result.isInitialized()) {
2023           throw newUninitializedMessageException(result);
2024         }
2025         return result;
2026       }
2027 
2028       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() {
2029         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this);
2030         int from_bitField0_ = bitField0_;
2031         int to_bitField0_ = 0;
2032         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2033           to_bitField0_ |= 0x00000001;
2034         }
2035         result.prefix_ = prefix_;
2036         result.bitField0_ = to_bitField0_;
2037         onBuilt();
2038         return result;
2039       }
2040 
2041       public Builder mergeFrom(com.google.protobuf.Message other) {
2042         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) {
2043           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other);
2044         } else {
2045           super.mergeFrom(other);
2046           return this;
2047         }
2048       }
2049 
2050       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) {
2051         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this;
2052         if (other.hasPrefix()) {
2053           setPrefix(other.getPrefix());
2054         }
2055         this.mergeUnknownFields(other.getUnknownFields());
2056         return this;
2057       }
2058 
2059       public final boolean isInitialized() {
2060         if (!hasPrefix()) {
2061 
2062           return false;
2063         }
2064         return true;
2065       }
2066 
2067       public Builder mergeFrom(
2068           com.google.protobuf.CodedInputStream input,
2069           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2070           throws java.io.IOException {
2071         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parsedMessage = null;
2072         try {
2073           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
2074         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2075           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage();
2076           throw e;
2077         } finally {
2078           if (parsedMessage != null) {
2079             mergeFrom(parsedMessage);
2080           }
2081         }
2082         return this;
2083       }
2084       private int bitField0_;
2085 
2086       // required bytes prefix = 1;
2087       private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
2088       /**
2089        * <code>required bytes prefix = 1;</code>
2090        */
2091       public boolean hasPrefix() {
2092         return ((bitField0_ & 0x00000001) == 0x00000001);
2093       }
2094       /**
2095        * <code>required bytes prefix = 1;</code>
2096        */
2097       public com.google.protobuf.ByteString getPrefix() {
2098         return prefix_;
2099       }
2100       /**
2101        * <code>required bytes prefix = 1;</code>
2102        */
2103       public Builder setPrefix(com.google.protobuf.ByteString value) {
2104         if (value == null) {
2105           throw new NullPointerException();
2106         }
2107         bitField0_ |= 0x00000001;
2108         prefix_ = value;
2109         onChanged();
2110         return this;
2111       }
2112       /**
2113        * <code>required bytes prefix = 1;</code>
2114        */
2115       public Builder clearPrefix() {
2116         bitField0_ = (bitField0_ & ~0x00000001);
2117         prefix_ = getDefaultInstance().getPrefix();
2118         onChanged();
2119         return this;
2120       }
2121 
2122       // @@protoc_insertion_point(builder_scope:ColumnPrefixFilter)
2123     }
2124 
2125     static {
2126       defaultInstance = new ColumnPrefixFilter(true);
2127       defaultInstance.initFields();
2128     }
2129 
2130     // @@protoc_insertion_point(class_scope:ColumnPrefixFilter)
2131   }
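  // Illustrative usage sketch for the message above; the prefix value is an
  // arbitrary example. Only the required prefix bytes are carried:
  //
  //   FilterProtos.ColumnPrefixFilter prefixFilter =
  //       FilterProtos.ColumnPrefixFilter.newBuilder()
  //           .setPrefix(com.google.protobuf.ByteString.copyFromUtf8("col_"))
  //           .build();
  //   FilterProtos.ColumnPrefixFilter reparsed =
  //       FilterProtos.ColumnPrefixFilter.parseFrom(prefixFilter.toByteString());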
2132 
2133   public interface ColumnRangeFilterOrBuilder
2134       extends com.google.protobuf.MessageOrBuilder {
2135 
2136     // optional bytes min_column = 1;
2137     /**
2138      * <code>optional bytes min_column = 1;</code>
2139      */
2140     boolean hasMinColumn();
2141     /**
2142      * <code>optional bytes min_column = 1;</code>
2143      */
2144     com.google.protobuf.ByteString getMinColumn();
2145 
2146     // optional bool min_column_inclusive = 2;
2147     /**
2148      * <code>optional bool min_column_inclusive = 2;</code>
2149      */
2150     boolean hasMinColumnInclusive();
2151     /**
2152      * <code>optional bool min_column_inclusive = 2;</code>
2153      */
2154     boolean getMinColumnInclusive();
2155 
2156     // optional bytes max_column = 3;
2157     /**
2158      * <code>optional bytes max_column = 3;</code>
2159      */
2160     boolean hasMaxColumn();
2161     /**
2162      * <code>optional bytes max_column = 3;</code>
2163      */
2164     com.google.protobuf.ByteString getMaxColumn();
2165 
2166     // optional bool max_column_inclusive = 4;
2167     /**
2168      * <code>optional bool max_column_inclusive = 4;</code>
2169      */
2170     boolean hasMaxColumnInclusive();
2171     /**
2172      * <code>optional bool max_column_inclusive = 4;</code>
2173      */
2174     boolean getMaxColumnInclusive();
2175   }
2176   /**
2177    * Protobuf type {@code ColumnRangeFilter}
2178    */
2179   public static final class ColumnRangeFilter extends
2180       com.google.protobuf.GeneratedMessage
2181       implements ColumnRangeFilterOrBuilder {
2182     // Use ColumnRangeFilter.newBuilder() to construct.
2183     private ColumnRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
2184       super(builder);
2185       this.unknownFields = builder.getUnknownFields();
2186     }
2187     private ColumnRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2188 
2189     private static final ColumnRangeFilter defaultInstance;
2190     public static ColumnRangeFilter getDefaultInstance() {
2191       return defaultInstance;
2192     }
2193 
2194     public ColumnRangeFilter getDefaultInstanceForType() {
2195       return defaultInstance;
2196     }
2197 
2198     private final com.google.protobuf.UnknownFieldSet unknownFields;
2199     @java.lang.Override
2200     public final com.google.protobuf.UnknownFieldSet
2201         getUnknownFields() {
2202       return this.unknownFields;
2203     }
2204     private ColumnRangeFilter(
2205         com.google.protobuf.CodedInputStream input,
2206         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2207         throws com.google.protobuf.InvalidProtocolBufferException {
2208       initFields();
2209       int mutable_bitField0_ = 0;
2210       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2211           com.google.protobuf.UnknownFieldSet.newBuilder();
2212       try {
2213         boolean done = false;
2214         while (!done) {
2215           int tag = input.readTag();
2216           switch (tag) {
2217             case 0:
2218               done = true;
2219               break;
2220             default: {
2221               if (!parseUnknownField(input, unknownFields,
2222                                      extensionRegistry, tag)) {
2223                 done = true;
2224               }
2225               break;
2226             }
2227             case 10: {
2228               bitField0_ |= 0x00000001;
2229               minColumn_ = input.readBytes();
2230               break;
2231             }
2232             case 16: {
2233               bitField0_ |= 0x00000002;
2234               minColumnInclusive_ = input.readBool();
2235               break;
2236             }
2237             case 26: {
2238               bitField0_ |= 0x00000004;
2239               maxColumn_ = input.readBytes();
2240               break;
2241             }
2242             case 32: {
2243               bitField0_ |= 0x00000008;
2244               maxColumnInclusive_ = input.readBool();
2245               break;
2246             }
2247           }
2248         }
2249       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2250         throw e.setUnfinishedMessage(this);
2251       } catch (java.io.IOException e) {
2252         throw new com.google.protobuf.InvalidProtocolBufferException(
2253             e.getMessage()).setUnfinishedMessage(this);
2254       } finally {
2255         this.unknownFields = unknownFields.build();
2256         makeExtensionsImmutable();
2257       }
2258     }
2259     public static final com.google.protobuf.Descriptors.Descriptor
2260         getDescriptor() {
2261       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor;
2262     }
2263 
2264     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2265         internalGetFieldAccessorTable() {
2266       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable
2267           .ensureFieldAccessorsInitialized(
2268               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class);
2269     }
2270 
2271     public static com.google.protobuf.Parser<ColumnRangeFilter> PARSER =
2272         new com.google.protobuf.AbstractParser<ColumnRangeFilter>() {
2273       public ColumnRangeFilter parsePartialFrom(
2274           com.google.protobuf.CodedInputStream input,
2275           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2276           throws com.google.protobuf.InvalidProtocolBufferException {
2277         return new ColumnRangeFilter(input, extensionRegistry);
2278       }
2279     };
2280 
2281     @java.lang.Override
2282     public com.google.protobuf.Parser<ColumnRangeFilter> getParserForType() {
2283       return PARSER;
2284     }
2285 
2286     private int bitField0_;
2287     // optional bytes min_column = 1;
2288     public static final int MIN_COLUMN_FIELD_NUMBER = 1;
2289     private com.google.protobuf.ByteString minColumn_;
2290     /**
2291      * <code>optional bytes min_column = 1;</code>
2292      */
2293     public boolean hasMinColumn() {
2294       return ((bitField0_ & 0x00000001) == 0x00000001);
2295     }
2296     /**
2297      * <code>optional bytes min_column = 1;</code>
2298      */
2299     public com.google.protobuf.ByteString getMinColumn() {
2300       return minColumn_;
2301     }
2302 
2303     // optional bool min_column_inclusive = 2;
2304     public static final int MIN_COLUMN_INCLUSIVE_FIELD_NUMBER = 2;
2305     private boolean minColumnInclusive_;
2306     /**
2307      * <code>optional bool min_column_inclusive = 2;</code>
2308      */
2309     public boolean hasMinColumnInclusive() {
2310       return ((bitField0_ & 0x00000002) == 0x00000002);
2311     }
2312     /**
2313      * <code>optional bool min_column_inclusive = 2;</code>
2314      */
2315     public boolean getMinColumnInclusive() {
2316       return minColumnInclusive_;
2317     }
2318 
2319     // optional bytes max_column = 3;
2320     public static final int MAX_COLUMN_FIELD_NUMBER = 3;
2321     private com.google.protobuf.ByteString maxColumn_;
2322     /**
2323      * <code>optional bytes max_column = 3;</code>
2324      */
2325     public boolean hasMaxColumn() {
2326       return ((bitField0_ & 0x00000004) == 0x00000004);
2327     }
2328     /**
2329      * <code>optional bytes max_column = 3;</code>
2330      */
2331     public com.google.protobuf.ByteString getMaxColumn() {
2332       return maxColumn_;
2333     }
2334 
2335     // optional bool max_column_inclusive = 4;
2336     public static final int MAX_COLUMN_INCLUSIVE_FIELD_NUMBER = 4;
2337     private boolean maxColumnInclusive_;
2338     /**
2339      * <code>optional bool max_column_inclusive = 4;</code>
2340      */
2341     public boolean hasMaxColumnInclusive() {
2342       return ((bitField0_ & 0x00000008) == 0x00000008);
2343     }
2344     /**
2345      * <code>optional bool max_column_inclusive = 4;</code>
2346      */
2347     public boolean getMaxColumnInclusive() {
2348       return maxColumnInclusive_;
2349     }
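    // Illustrative usage sketch: a column range is expressed through the four
    // optional fields above. The builder setter names below follow the standard
    // protoc-generated pattern for these fields (assumed here), and the byte
    // values are arbitrary examples.
    //
    //   FilterProtos.ColumnRangeFilter rangeFilter =
    //       FilterProtos.ColumnRangeFilter.newBuilder()
    //           .setMinColumn(com.google.protobuf.ByteString.copyFromUtf8("c1"))
    //           .setMinColumnInclusive(true)
    //           .setMaxColumn(com.google.protobuf.ByteString.copyFromUtf8("c9"))
    //           .setMaxColumnInclusive(false)
    //           .build();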
2350 
2351     private void initFields() {
2352       minColumn_ = com.google.protobuf.ByteString.EMPTY;
2353       minColumnInclusive_ = false;
2354       maxColumn_ = com.google.protobuf.ByteString.EMPTY;
2355       maxColumnInclusive_ = false;
2356     }
2357     private byte memoizedIsInitialized = -1;
2358     public final boolean isInitialized() {
2359       byte isInitialized = memoizedIsInitialized;
2360       if (isInitialized != -1) return isInitialized == 1;
2361 
2362       memoizedIsInitialized = 1;
2363       return true;
2364     }
2365 
2366     public void writeTo(com.google.protobuf.CodedOutputStream output)
2367                         throws java.io.IOException {
2368       getSerializedSize();
2369       if (((bitField0_ & 0x00000001) == 0x00000001)) {
2370         output.writeBytes(1, minColumn_);
2371       }
2372       if (((bitField0_ & 0x00000002) == 0x00000002)) {
2373         output.writeBool(2, minColumnInclusive_);
2374       }
2375       if (((bitField0_ & 0x00000004) == 0x00000004)) {
2376         output.writeBytes(3, maxColumn_);
2377       }
2378       if (((bitField0_ & 0x00000008) == 0x00000008)) {
2379         output.writeBool(4, maxColumnInclusive_);
2380       }
2381       getUnknownFields().writeTo(output);
2382     }
2383 
2384     private int memoizedSerializedSize = -1;
2385     public int getSerializedSize() {
2386       int size = memoizedSerializedSize;
2387       if (size != -1) return size;
2388 
2389       size = 0;
2390       if (((bitField0_ & 0x00000001) == 0x00000001)) {
2391         size += com.google.protobuf.CodedOutputStream
2392           .computeBytesSize(1, minColumn_);
2393       }
2394       if (((bitField0_ & 0x00000002) == 0x00000002)) {
2395         size += com.google.protobuf.CodedOutputStream
2396           .computeBoolSize(2, minColumnInclusive_);
2397       }
2398       if (((bitField0_ & 0x00000004) == 0x00000004)) {
2399         size += com.google.protobuf.CodedOutputStream
2400           .computeBytesSize(3, maxColumn_);
2401       }
2402       if (((bitField0_ & 0x00000008) == 0x00000008)) {
2403         size += com.google.protobuf.CodedOutputStream
2404           .computeBoolSize(4, maxColumnInclusive_);
2405       }
2406       size += getUnknownFields().getSerializedSize();
2407       memoizedSerializedSize = size;
2408       return size;
2409     }
2410 
2411     private static final long serialVersionUID = 0L;
2412     @java.lang.Override
2413     protected java.lang.Object writeReplace()
2414         throws java.io.ObjectStreamException {
2415       return super.writeReplace();
2416     }
2417 
2418     @java.lang.Override
2419     public boolean equals(final java.lang.Object obj) {
2420       if (obj == this) {
2421         return true;
2422       }
2423       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)) {
2424         return super.equals(obj);
2425       }
2426       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj;
2427 
2428       boolean result = true;
2429       result = result && (hasMinColumn() == other.hasMinColumn());
2430       if (hasMinColumn()) {
2431         result = result && getMinColumn()
2432             .equals(other.getMinColumn());
2433       }
2434       result = result && (hasMinColumnInclusive() == other.hasMinColumnInclusive());
2435       if (hasMinColumnInclusive()) {
2436         result = result && (getMinColumnInclusive()
2437             == other.getMinColumnInclusive());
2438       }
2439       result = result && (hasMaxColumn() == other.hasMaxColumn());
2440       if (hasMaxColumn()) {
2441         result = result && getMaxColumn()
2442             .equals(other.getMaxColumn());
2443       }
2444       result = result && (hasMaxColumnInclusive() == other.hasMaxColumnInclusive());
2445       if (hasMaxColumnInclusive()) {
2446         result = result && (getMaxColumnInclusive()
2447             == other.getMaxColumnInclusive());
2448       }
2449       result = result &&
2450           getUnknownFields().equals(other.getUnknownFields());
2451       return result;
2452     }
2453 
2454     private int memoizedHashCode = 0;
2455     @java.lang.Override
2456     public int hashCode() {
2457       if (memoizedHashCode != 0) {
2458         return memoizedHashCode;
2459       }
2460       int hash = 41;
2461       hash = (19 * hash) + getDescriptorForType().hashCode();
2462       if (hasMinColumn()) {
2463         hash = (37 * hash) + MIN_COLUMN_FIELD_NUMBER;
2464         hash = (53 * hash) + getMinColumn().hashCode();
2465       }
2466       if (hasMinColumnInclusive()) {
2467         hash = (37 * hash) + MIN_COLUMN_INCLUSIVE_FIELD_NUMBER;
2468         hash = (53 * hash) + hashBoolean(getMinColumnInclusive());
2469       }
2470       if (hasMaxColumn()) {
2471         hash = (37 * hash) + MAX_COLUMN_FIELD_NUMBER;
2472         hash = (53 * hash) + getMaxColumn().hashCode();
2473       }
2474       if (hasMaxColumnInclusive()) {
2475         hash = (37 * hash) + MAX_COLUMN_INCLUSIVE_FIELD_NUMBER;
2476         hash = (53 * hash) + hashBoolean(getMaxColumnInclusive());
2477       }
2478       hash = (29 * hash) + getUnknownFields().hashCode();
2479       memoizedHashCode = hash;
2480       return hash;
2481     }
2482 
2483     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
2484         com.google.protobuf.ByteString data)
2485         throws com.google.protobuf.InvalidProtocolBufferException {
2486       return PARSER.parseFrom(data);
2487     }
2488     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
2489         com.google.protobuf.ByteString data,
2490         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2491         throws com.google.protobuf.InvalidProtocolBufferException {
2492       return PARSER.parseFrom(data, extensionRegistry);
2493     }
2494     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data)
2495         throws com.google.protobuf.InvalidProtocolBufferException {
2496       return PARSER.parseFrom(data);
2497     }
2498     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
2499         byte[] data,
2500         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2501         throws com.google.protobuf.InvalidProtocolBufferException {
2502       return PARSER.parseFrom(data, extensionRegistry);
2503     }
2504     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input)
2505         throws java.io.IOException {
2506       return PARSER.parseFrom(input);
2507     }
2508     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
2509         java.io.InputStream input,
2510         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2511         throws java.io.IOException {
2512       return PARSER.parseFrom(input, extensionRegistry);
2513     }
2514     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input)
2515         throws java.io.IOException {
2516       return PARSER.parseDelimitedFrom(input);
2517     }
2518     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(
2519         java.io.InputStream input,
2520         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2521         throws java.io.IOException {
2522       return PARSER.parseDelimitedFrom(input, extensionRegistry);
2523     }
2524     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
2525         com.google.protobuf.CodedInputStream input)
2526         throws java.io.IOException {
2527       return PARSER.parseFrom(input);
2528     }
2529     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
2530         com.google.protobuf.CodedInputStream input,
2531         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2532         throws java.io.IOException {
2533       return PARSER.parseFrom(input, extensionRegistry);
2534     }
2535 
newBuilder()2536     public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ColumnRangeFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        minColumn_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        minColumnInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        maxColumn_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        maxColumnInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.minColumn_ = minColumn_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.minColumnInclusive_ = minColumnInclusive_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.maxColumn_ = maxColumn_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.maxColumnInclusive_ = maxColumnInclusive_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this;
        if (other.hasMinColumn()) {
          setMinColumn(other.getMinColumn());
        }
        if (other.hasMinColumnInclusive()) {
          setMinColumnInclusive(other.getMinColumnInclusive());
        }
        if (other.hasMaxColumn()) {
          setMaxColumn(other.getMaxColumn());
        }
        if (other.hasMaxColumnInclusive()) {
          setMaxColumnInclusive(other.getMaxColumnInclusive());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional bytes min_column = 1;
      private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public boolean hasMinColumn() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public com.google.protobuf.ByteString getMinColumn() {
        return minColumn_;
      }
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public Builder setMinColumn(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        minColumn_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes min_column = 1;</code>
       */
      public Builder clearMinColumn() {
        bitField0_ = (bitField0_ & ~0x00000001);
        minColumn_ = getDefaultInstance().getMinColumn();
        onChanged();
        return this;
      }

      // optional bool min_column_inclusive = 2;
      private boolean minColumnInclusive_ ;
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public boolean hasMinColumnInclusive() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public boolean getMinColumnInclusive() {
        return minColumnInclusive_;
      }
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public Builder setMinColumnInclusive(boolean value) {
        bitField0_ |= 0x00000002;
        minColumnInclusive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool min_column_inclusive = 2;</code>
       */
      public Builder clearMinColumnInclusive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        minColumnInclusive_ = false;
        onChanged();
        return this;
      }

      // optional bytes max_column = 3;
      private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public boolean hasMaxColumn() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public com.google.protobuf.ByteString getMaxColumn() {
        return maxColumn_;
      }
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public Builder setMaxColumn(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        maxColumn_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes max_column = 3;</code>
       */
      public Builder clearMaxColumn() {
        bitField0_ = (bitField0_ & ~0x00000004);
        maxColumn_ = getDefaultInstance().getMaxColumn();
        onChanged();
        return this;
      }

      // optional bool max_column_inclusive = 4;
      private boolean maxColumnInclusive_ ;
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public boolean hasMaxColumnInclusive() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public boolean getMaxColumnInclusive() {
        return maxColumnInclusive_;
      }
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public Builder setMaxColumnInclusive(boolean value) {
        bitField0_ |= 0x00000008;
        maxColumnInclusive_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool max_column_inclusive = 4;</code>
       */
      public Builder clearMaxColumnInclusive() {
        bitField0_ = (bitField0_ & ~0x00000008);
        maxColumnInclusive_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ColumnRangeFilter)
    }

    static {
      defaultInstance = new ColumnRangeFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ColumnRangeFilter)
  }
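
  // --- Illustrative usage sketch (not part of the protoc-generated output) ---
  // Shows how a ColumnRangeFilter message is typically built through its
  // Builder and round-tripped with the static parseFrom() defined above.
  // All four fields are optional, so an empty builder would also yield a
  // valid message; the column bounds used here are hypothetical examples.
  private static ColumnRangeFilter columnRangeFilterRoundTripSketch()
      throws com.google.protobuf.InvalidProtocolBufferException {
    ColumnRangeFilter filter = ColumnRangeFilter.newBuilder()
        .setMinColumn(com.google.protobuf.ByteString.copyFromUtf8("col-a"))  // example lower bound
        .setMinColumnInclusive(true)
        .setMaxColumn(com.google.protobuf.ByteString.copyFromUtf8("col-z"))  // example upper bound
        .setMaxColumnInclusive(false)
        .build();
    // Serialize to bytes and parse back through the generated PARSER path.
    return ColumnRangeFilter.parseFrom(filter.toByteArray());
  }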

  public interface CompareFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .CompareType compare_op = 1;
    /**
     * <code>required .CompareType compare_op = 1;</code>
     */
    boolean hasCompareOp();
    /**
     * <code>required .CompareType compare_op = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp();

    // optional .Comparator comparator = 2;
    /**
     * <code>optional .Comparator comparator = 2;</code>
     */
    boolean hasComparator();
    /**
     * <code>optional .Comparator comparator = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
    /**
     * <code>optional .Comparator comparator = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
  }
  /**
   * Protobuf type {@code CompareFilter}
   */
  public static final class CompareFilter extends
      com.google.protobuf.GeneratedMessage
      implements CompareFilterOrBuilder {
    // Use CompareFilter.newBuilder() to construct.
    private CompareFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CompareFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CompareFilter defaultInstance;
    public static CompareFilter getDefaultInstance() {
      return defaultInstance;
    }

    public CompareFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CompareFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                compareOp_ = value;
              }
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = comparator_.toBuilder();
              }
              comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(comparator_);
                comparator_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<CompareFilter> PARSER =
        new com.google.protobuf.AbstractParser<CompareFilter>() {
      public CompareFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CompareFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CompareFilter> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required .CompareType compare_op = 1;
    public static final int COMPARE_OP_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_;
    /**
     * <code>required .CompareType compare_op = 1;</code>
     */
    public boolean hasCompareOp() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .CompareType compare_op = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
      return compareOp_;
    }

    // optional .Comparator comparator = 2;
    public static final int COMPARATOR_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
    /**
     * <code>optional .Comparator comparator = 2;</code>
     */
    public boolean hasComparator() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .Comparator comparator = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
      return comparator_;
    }
    /**
     * <code>optional .Comparator comparator = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
      return comparator_;
    }

    private void initFields() {
      compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasCompareOp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasComparator()) {
        if (!getComparator().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, compareOp_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, comparator_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, compareOp_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, comparator_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj;

      boolean result = true;
      result = result && (hasCompareOp() == other.hasCompareOp());
      if (hasCompareOp()) {
        result = result &&
            (getCompareOp() == other.getCompareOp());
      }
      result = result && (hasComparator() == other.hasComparator());
      if (hasComparator()) {
        result = result && getComparator()
            .equals(other.getComparator());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasCompareOp()) {
        hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getCompareOp());
      }
      if (hasComparator()) {
        hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
        hash = (53 * hash) + getComparator().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code CompareFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getComparatorFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
        bitField0_ = (bitField0_ & ~0x00000001);
        if (comparatorBuilder_ == null) {
          comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
        } else {
          comparatorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.compareOp_ = compareOp_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (comparatorBuilder_ == null) {
          result.comparator_ = comparator_;
        } else {
          result.comparator_ = comparatorBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this;
        if (other.hasCompareOp()) {
          setCompareOp(other.getCompareOp());
        }
        if (other.hasComparator()) {
          mergeComparator(other.getComparator());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasCompareOp()) {

          return false;
        }
        if (hasComparator()) {
          if (!getComparator().isInitialized()) {

            return false;
          }
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .CompareType compare_op = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
      /**
       * <code>required .CompareType compare_op = 1;</code>
       */
      public boolean hasCompareOp() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .CompareType compare_op = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
        return compareOp_;
      }
      /**
       * <code>required .CompareType compare_op = 1;</code>
       */
      public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        compareOp_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .CompareType compare_op = 1;</code>
       */
      public Builder clearCompareOp() {
        bitField0_ = (bitField0_ & ~0x00000001);
        compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
        onChanged();
        return this;
      }

      // optional .Comparator comparator = 2;
      private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public boolean hasComparator() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
        if (comparatorBuilder_ == null) {
          return comparator_;
        } else {
          return comparatorBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
        if (comparatorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          comparator_ = value;
          onChanged();
        } else {
          comparatorBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public Builder setComparator(
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
        if (comparatorBuilder_ == null) {
          comparator_ = builderForValue.build();
          onChanged();
        } else {
          comparatorBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
        if (comparatorBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
            comparator_ =
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
          } else {
            comparator_ = value;
          }
          onChanged();
        } else {
          comparatorBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public Builder clearComparator() {
        if (comparatorBuilder_ == null) {
          comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
          onChanged();
        } else {
          comparatorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getComparatorFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
        if (comparatorBuilder_ != null) {
          return comparatorBuilder_.getMessageOrBuilder();
        } else {
          return comparator_;
        }
      }
      /**
       * <code>optional .Comparator comparator = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
          getComparatorFieldBuilder() {
        if (comparatorBuilder_ == null) {
          comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
                  comparator_,
                  getParentForChildren(),
                  isClean());
          comparator_ = null;
        }
        return comparatorBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:CompareFilter)
    }

    static {
      defaultInstance = new CompareFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:CompareFilter)
  }
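
  // --- Illustrative usage sketch (not part of the protoc-generated output) ---
  // CompareFilter declares compare_op as required, so build() throws an
  // UninitializedMessageException until the enum is set, while the optional
  // Comparator sub-message may be left unset. The EQUAL value below is only
  // an example choice.
  private static CompareFilter compareFilterRoundTripSketch()
      throws com.google.protobuf.InvalidProtocolBufferException {
    CompareFilter filter = CompareFilter.newBuilder()
        .setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.EQUAL)
        .build();
    // Serialize and re-parse through the static parseFrom(ByteString) defined above.
    return CompareFilter.parseFrom(filter.toByteString());
  }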

  public interface DependentColumnFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .CompareFilter compare_filter = 1;
    /**
     * <code>required .CompareFilter compare_filter = 1;</code>
     */
    boolean hasCompareFilter();
    /**
     * <code>required .CompareFilter compare_filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
    /**
     * <code>required .CompareFilter compare_filter = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();

    // optional bytes column_family = 2;
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    boolean hasColumnFamily();
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    com.google.protobuf.ByteString getColumnFamily();

    // optional bytes column_qualifier = 3;
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    boolean hasColumnQualifier();
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    com.google.protobuf.ByteString getColumnQualifier();

    // optional bool drop_dependent_column = 4;
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    boolean hasDropDependentColumn();
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    boolean getDropDependentColumn();
  }
  /**
   * Protobuf type {@code DependentColumnFilter}
   */
  public static final class DependentColumnFilter extends
      com.google.protobuf.GeneratedMessage
      implements DependentColumnFilterOrBuilder {
    // Use DependentColumnFilter.newBuilder() to construct.
    private DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private DependentColumnFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final DependentColumnFilter defaultInstance;
    public static DependentColumnFilter getDefaultInstance() {
      return defaultInstance;
    }

    public DependentColumnFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private DependentColumnFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = compareFilter_.toBuilder();
              }
              compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(compareFilter_);
                compareFilter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              columnFamily_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              columnQualifier_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              dropDependentColumn_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class);
    }

    public static com.google.protobuf.Parser<DependentColumnFilter> PARSER =
        new com.google.protobuf.AbstractParser<DependentColumnFilter>() {
      public DependentColumnFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DependentColumnFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DependentColumnFilter> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required .CompareFilter compare_filter = 1;
    public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
    /**
     * <code>required .CompareFilter compare_filter = 1;</code>
     */
    public boolean hasCompareFilter() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
      return compareFilter_;
    }
    /**
     * <code>required .CompareFilter compare_filter = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
      return compareFilter_;
    }

    // optional bytes column_family = 2;
    public static final int COLUMN_FAMILY_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString columnFamily_;
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    public boolean hasColumnFamily() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes column_family = 2;</code>
     */
    public com.google.protobuf.ByteString getColumnFamily() {
      return columnFamily_;
    }

    // optional bytes column_qualifier = 3;
    public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString columnQualifier_;
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    public boolean hasColumnQualifier() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes column_qualifier = 3;</code>
     */
    public com.google.protobuf.ByteString getColumnQualifier() {
      return columnQualifier_;
    }

    // optional bool drop_dependent_column = 4;
    public static final int DROP_DEPENDENT_COLUMN_FIELD_NUMBER = 4;
    private boolean dropDependentColumn_;
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    public boolean hasDropDependentColumn() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool drop_dependent_column = 4;</code>
     */
    public boolean getDropDependentColumn() {
      return dropDependentColumn_;
    }
3742 
initFields()3743     private void initFields() {
3744       compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
3745       columnFamily_ = com.google.protobuf.ByteString.EMPTY;
3746       columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
3747       dropDependentColumn_ = false;
3748     }
3749     private byte memoizedIsInitialized = -1;
isInitialized()3750     public final boolean isInitialized() {
3751       byte isInitialized = memoizedIsInitialized;
3752       if (isInitialized != -1) return isInitialized == 1;
3753 
3754       if (!hasCompareFilter()) {
3755         memoizedIsInitialized = 0;
3756         return false;
3757       }
3758       if (!getCompareFilter().isInitialized()) {
3759         memoizedIsInitialized = 0;
3760         return false;
3761       }
3762       memoizedIsInitialized = 1;
3763       return true;
3764     }
3765 
3766     public void writeTo(com.google.protobuf.CodedOutputStream output)
3767                         throws java.io.IOException {
3768       getSerializedSize();
3769       if (((bitField0_ & 0x00000001) == 0x00000001)) {
3770         output.writeMessage(1, compareFilter_);
3771       }
3772       if (((bitField0_ & 0x00000002) == 0x00000002)) {
3773         output.writeBytes(2, columnFamily_);
3774       }
3775       if (((bitField0_ & 0x00000004) == 0x00000004)) {
3776         output.writeBytes(3, columnQualifier_);
3777       }
3778       if (((bitField0_ & 0x00000008) == 0x00000008)) {
3779         output.writeBool(4, dropDependentColumn_);
3780       }
3781       getUnknownFields().writeTo(output);
3782     }
3783 
3784     private int memoizedSerializedSize = -1;
3785     public int getSerializedSize() {
3786       int size = memoizedSerializedSize;
3787       if (size != -1) return size;
3788 
3789       size = 0;
3790       if (((bitField0_ & 0x00000001) == 0x00000001)) {
3791         size += com.google.protobuf.CodedOutputStream
3792           .computeMessageSize(1, compareFilter_);
3793       }
3794       if (((bitField0_ & 0x00000002) == 0x00000002)) {
3795         size += com.google.protobuf.CodedOutputStream
3796           .computeBytesSize(2, columnFamily_);
3797       }
3798       if (((bitField0_ & 0x00000004) == 0x00000004)) {
3799         size += com.google.protobuf.CodedOutputStream
3800           .computeBytesSize(3, columnQualifier_);
3801       }
3802       if (((bitField0_ & 0x00000008) == 0x00000008)) {
3803         size += com.google.protobuf.CodedOutputStream
3804           .computeBoolSize(4, dropDependentColumn_);
3805       }
3806       size += getUnknownFields().getSerializedSize();
3807       memoizedSerializedSize = size;
3808       return size;
3809     }
3810 
3811     private static final long serialVersionUID = 0L;
3812     @java.lang.Override
3813     protected java.lang.Object writeReplace()
3814         throws java.io.ObjectStreamException {
3815       return super.writeReplace();
3816     }
3817 
3818     @java.lang.Override
3819     public boolean equals(final java.lang.Object obj) {
3820       if (obj == this) {
3821        return true;
3822       }
3823       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)) {
3824         return super.equals(obj);
3825       }
3826       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj;
3827 
3828       boolean result = true;
3829       result = result && (hasCompareFilter() == other.hasCompareFilter());
3830       if (hasCompareFilter()) {
3831         result = result && getCompareFilter()
3832             .equals(other.getCompareFilter());
3833       }
3834       result = result && (hasColumnFamily() == other.hasColumnFamily());
3835       if (hasColumnFamily()) {
3836         result = result && getColumnFamily()
3837             .equals(other.getColumnFamily());
3838       }
3839       result = result && (hasColumnQualifier() == other.hasColumnQualifier());
3840       if (hasColumnQualifier()) {
3841         result = result && getColumnQualifier()
3842             .equals(other.getColumnQualifier());
3843       }
3844       result = result && (hasDropDependentColumn() == other.hasDropDependentColumn());
3845       if (hasDropDependentColumn()) {
3846         result = result && (getDropDependentColumn()
3847             == other.getDropDependentColumn());
3848       }
3849       result = result &&
3850           getUnknownFields().equals(other.getUnknownFields());
3851       return result;
3852     }
3853 
3854     private int memoizedHashCode = 0;
3855     @java.lang.Override
3856     public int hashCode() {
3857       if (memoizedHashCode != 0) {
3858         return memoizedHashCode;
3859       }
3860       int hash = 41;
3861       hash = (19 * hash) + getDescriptorForType().hashCode();
3862       if (hasCompareFilter()) {
3863         hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
3864         hash = (53 * hash) + getCompareFilter().hashCode();
3865       }
3866       if (hasColumnFamily()) {
3867         hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER;
3868         hash = (53 * hash) + getColumnFamily().hashCode();
3869       }
3870       if (hasColumnQualifier()) {
3871         hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER;
3872         hash = (53 * hash) + getColumnQualifier().hashCode();
3873       }
3874       if (hasDropDependentColumn()) {
3875         hash = (37 * hash) + DROP_DEPENDENT_COLUMN_FIELD_NUMBER;
3876         hash = (53 * hash) + hashBoolean(getDropDependentColumn());
3877       }
3878       hash = (29 * hash) + getUnknownFields().hashCode();
3879       memoizedHashCode = hash;
3880       return hash;
3881     }
3882 
3883     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
3884         com.google.protobuf.ByteString data)
3885         throws com.google.protobuf.InvalidProtocolBufferException {
3886       return PARSER.parseFrom(data);
3887     }
3888     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
3889         com.google.protobuf.ByteString data,
3890         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3891         throws com.google.protobuf.InvalidProtocolBufferException {
3892       return PARSER.parseFrom(data, extensionRegistry);
3893     }
3894     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data)
3895         throws com.google.protobuf.InvalidProtocolBufferException {
3896       return PARSER.parseFrom(data);
3897     }
3898     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
3899         byte[] data,
3900         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3901         throws com.google.protobuf.InvalidProtocolBufferException {
3902       return PARSER.parseFrom(data, extensionRegistry);
3903     }
3904     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input)
3905         throws java.io.IOException {
3906       return PARSER.parseFrom(input);
3907     }
3908     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
3909         java.io.InputStream input,
3910         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3911         throws java.io.IOException {
3912       return PARSER.parseFrom(input, extensionRegistry);
3913     }
3914     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input)
3915         throws java.io.IOException {
3916       return PARSER.parseDelimitedFrom(input);
3917     }
3918     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(
3919         java.io.InputStream input,
3920         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3921         throws java.io.IOException {
3922       return PARSER.parseDelimitedFrom(input, extensionRegistry);
3923     }
3924     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
3925         com.google.protobuf.CodedInputStream input)
3926         throws java.io.IOException {
3927       return PARSER.parseFrom(input);
3928     }
3929     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(
3930         com.google.protobuf.CodedInputStream input,
3931         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3932         throws java.io.IOException {
3933       return PARSER.parseFrom(input, extensionRegistry);
3934     }
3935 
3936     public static Builder newBuilder() { return Builder.create(); }
3937     public Builder newBuilderForType() { return newBuilder(); }
3938     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) {
3939       return newBuilder().mergeFrom(prototype);
3940     }
3941     public Builder toBuilder() { return newBuilder(this); }
3942 
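    // A minimal usage sketch, assuming `compareFilter` is a fully initialized CompareFilter
    // built elsewhere (compare_filter is required, so build() throws if it is missing);
    // the column literals are placeholders only.
    //
    //   DependentColumnFilter filter = DependentColumnFilter.newBuilder()
    //       .setCompareFilter(compareFilter)
    //       .setColumnFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
    //       .setColumnQualifier(com.google.protobuf.ByteString.copyFromUtf8("qual"))
    //       .setDropDependentColumn(true)
    //       .build();
    //   DependentColumnFilter roundTripped =
    //       DependentColumnFilter.parseFrom(filter.toByteArray());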
3943     @java.lang.Override
3944     protected Builder newBuilderForType(
3945         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3946       Builder builder = new Builder(parent);
3947       return builder;
3948     }
3949     /**
3950      * Protobuf type {@code DependentColumnFilter}
3951      */
3952     public static final class Builder extends
3953         com.google.protobuf.GeneratedMessage.Builder<Builder>
3954        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder {
3955       public static final com.google.protobuf.Descriptors.Descriptor
3956           getDescriptor() {
3957         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor;
3958       }
3959 
3960       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
3961           internalGetFieldAccessorTable() {
3962         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable
3963             .ensureFieldAccessorsInitialized(
3964                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class);
3965       }
3966 
3967       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder()
3968       private Builder() {
3969         maybeForceBuilderInitialization();
3970       }
3971 
3972       private Builder(
3973           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
3974         super(parent);
3975         maybeForceBuilderInitialization();
3976       }
3977       private void maybeForceBuilderInitialization() {
3978         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
3979           getCompareFilterFieldBuilder();
3980         }
3981       }
3982       private static Builder create() {
3983         return new Builder();
3984       }
3985 
3986       public Builder clear() {
3987         super.clear();
3988         if (compareFilterBuilder_ == null) {
3989           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
3990         } else {
3991           compareFilterBuilder_.clear();
3992         }
3993         bitField0_ = (bitField0_ & ~0x00000001);
3994         columnFamily_ = com.google.protobuf.ByteString.EMPTY;
3995         bitField0_ = (bitField0_ & ~0x00000002);
3996         columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
3997         bitField0_ = (bitField0_ & ~0x00000004);
3998         dropDependentColumn_ = false;
3999         bitField0_ = (bitField0_ & ~0x00000008);
4000         return this;
4001       }
4002 
4003       public Builder clone() {
4004         return create().mergeFrom(buildPartial());
4005       }
4006 
4007       public com.google.protobuf.Descriptors.Descriptor
4008           getDescriptorForType() {
4009         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor;
4010       }
4011 
4012       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() {
4013         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance();
4014       }
4015 
4016       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() {
4017         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial();
4018         if (!result.isInitialized()) {
4019           throw newUninitializedMessageException(result);
4020         }
4021         return result;
4022       }
4023 
4024       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() {
4025         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this);
4026         int from_bitField0_ = bitField0_;
4027         int to_bitField0_ = 0;
4028         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4029           to_bitField0_ |= 0x00000001;
4030         }
4031         if (compareFilterBuilder_ == null) {
4032           result.compareFilter_ = compareFilter_;
4033         } else {
4034           result.compareFilter_ = compareFilterBuilder_.build();
4035         }
4036         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4037           to_bitField0_ |= 0x00000002;
4038         }
4039         result.columnFamily_ = columnFamily_;
4040         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4041           to_bitField0_ |= 0x00000004;
4042         }
4043         result.columnQualifier_ = columnQualifier_;
4044         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
4045           to_bitField0_ |= 0x00000008;
4046         }
4047         result.dropDependentColumn_ = dropDependentColumn_;
4048         result.bitField0_ = to_bitField0_;
4049         onBuilt();
4050         return result;
4051       }
4052 
4053       public Builder mergeFrom(com.google.protobuf.Message other) {
4054         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) {
4055           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other);
4056         } else {
4057           super.mergeFrom(other);
4058           return this;
4059         }
4060       }
4061 
4062       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) {
4063         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this;
4064         if (other.hasCompareFilter()) {
4065           mergeCompareFilter(other.getCompareFilter());
4066         }
4067         if (other.hasColumnFamily()) {
4068           setColumnFamily(other.getColumnFamily());
4069         }
4070         if (other.hasColumnQualifier()) {
4071           setColumnQualifier(other.getColumnQualifier());
4072         }
4073         if (other.hasDropDependentColumn()) {
4074           setDropDependentColumn(other.getDropDependentColumn());
4075         }
4076         this.mergeUnknownFields(other.getUnknownFields());
4077         return this;
4078       }
4079 
4080       public final boolean isInitialized() {
4081         if (!hasCompareFilter()) {
4082 
4083           return false;
4084         }
4085         if (!getCompareFilter().isInitialized()) {
4086 
4087           return false;
4088         }
4089         return true;
4090       }
4091 
4092       public Builder mergeFrom(
4093           com.google.protobuf.CodedInputStream input,
4094           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4095           throws java.io.IOException {
4096         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parsedMessage = null;
4097         try {
4098           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4099         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4100           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) e.getUnfinishedMessage();
4101           throw e;
4102         } finally {
4103           if (parsedMessage != null) {
4104             mergeFrom(parsedMessage);
4105           }
4106         }
4107         return this;
4108       }
4109       private int bitField0_;
4110 
4111       // required .CompareFilter compare_filter = 1;
4112       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
4113       private com.google.protobuf.SingleFieldBuilder<
4114           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
4115       /**
4116        * <code>required .CompareFilter compare_filter = 1;</code>
4117        */
4118       public boolean hasCompareFilter() {
4119         return ((bitField0_ & 0x00000001) == 0x00000001);
4120       }
4121       /**
4122        * <code>required .CompareFilter compare_filter = 1;</code>
4123        */
4124       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
4125         if (compareFilterBuilder_ == null) {
4126           return compareFilter_;
4127         } else {
4128           return compareFilterBuilder_.getMessage();
4129         }
4130       }
4131       /**
4132        * <code>required .CompareFilter compare_filter = 1;</code>
4133        */
4134       public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
4135         if (compareFilterBuilder_ == null) {
4136           if (value == null) {
4137             throw new NullPointerException();
4138           }
4139           compareFilter_ = value;
4140           onChanged();
4141         } else {
4142           compareFilterBuilder_.setMessage(value);
4143         }
4144         bitField0_ |= 0x00000001;
4145         return this;
4146       }
4147       /**
4148        * <code>required .CompareFilter compare_filter = 1;</code>
4149        */
4150       public Builder setCompareFilter(
4151           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
4152         if (compareFilterBuilder_ == null) {
4153           compareFilter_ = builderForValue.build();
4154           onChanged();
4155         } else {
4156           compareFilterBuilder_.setMessage(builderForValue.build());
4157         }
4158         bitField0_ |= 0x00000001;
4159         return this;
4160       }
4161       /**
4162        * <code>required .CompareFilter compare_filter = 1;</code>
4163        */
4164       public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
4165         if (compareFilterBuilder_ == null) {
4166           if (((bitField0_ & 0x00000001) == 0x00000001) &&
4167               compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
4168             compareFilter_ =
4169               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
4170           } else {
4171             compareFilter_ = value;
4172           }
4173           onChanged();
4174         } else {
4175           compareFilterBuilder_.mergeFrom(value);
4176         }
4177         bitField0_ |= 0x00000001;
4178         return this;
4179       }
4180       /**
4181        * <code>required .CompareFilter compare_filter = 1;</code>
4182        */
4183       public Builder clearCompareFilter() {
4184         if (compareFilterBuilder_ == null) {
4185           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
4186           onChanged();
4187         } else {
4188           compareFilterBuilder_.clear();
4189         }
4190         bitField0_ = (bitField0_ & ~0x00000001);
4191         return this;
4192       }
4193       /**
4194        * <code>required .CompareFilter compare_filter = 1;</code>
4195        */
4196       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
4197         bitField0_ |= 0x00000001;
4198         onChanged();
4199         return getCompareFilterFieldBuilder().getBuilder();
4200       }
4201       /**
4202        * <code>required .CompareFilter compare_filter = 1;</code>
4203        */
4204       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
4205         if (compareFilterBuilder_ != null) {
4206           return compareFilterBuilder_.getMessageOrBuilder();
4207         } else {
4208           return compareFilter_;
4209         }
4210       }
4211       /**
4212        * <code>required .CompareFilter compare_filter = 1;</code>
4213        */
4214       private com.google.protobuf.SingleFieldBuilder<
4215           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
4216           getCompareFilterFieldBuilder() {
4217         if (compareFilterBuilder_ == null) {
4218           compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
4219               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
4220                   compareFilter_,
4221                   getParentForChildren(),
4222                   isClean());
4223           compareFilter_ = null;
4224         }
4225         return compareFilterBuilder_;
4226       }
4227 
4228       // optional bytes column_family = 2;
4229       private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY;
4230       /**
4231        * <code>optional bytes column_family = 2;</code>
4232        */
4233       public boolean hasColumnFamily() {
4234         return ((bitField0_ & 0x00000002) == 0x00000002);
4235       }
4236       /**
4237        * <code>optional bytes column_family = 2;</code>
4238        */
4239       public com.google.protobuf.ByteString getColumnFamily() {
4240         return columnFamily_;
4241       }
4242       /**
4243        * <code>optional bytes column_family = 2;</code>
4244        */
4245       public Builder setColumnFamily(com.google.protobuf.ByteString value) {
4246         if (value == null) {
4247     throw new NullPointerException();
4248   }
4249   bitField0_ |= 0x00000002;
4250         columnFamily_ = value;
4251         onChanged();
4252         return this;
4253       }
4254       /**
4255        * <code>optional bytes column_family = 2;</code>
4256        */
4257       public Builder clearColumnFamily() {
4258         bitField0_ = (bitField0_ & ~0x00000002);
4259         columnFamily_ = getDefaultInstance().getColumnFamily();
4260         onChanged();
4261         return this;
4262       }
4263 
4264       // optional bytes column_qualifier = 3;
4265       private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
4266       /**
4267        * <code>optional bytes column_qualifier = 3;</code>
4268        */
4269       public boolean hasColumnQualifier() {
4270         return ((bitField0_ & 0x00000004) == 0x00000004);
4271       }
4272       /**
4273        * <code>optional bytes column_qualifier = 3;</code>
4274        */
4275       public com.google.protobuf.ByteString getColumnQualifier() {
4276         return columnQualifier_;
4277       }
4278       /**
4279        * <code>optional bytes column_qualifier = 3;</code>
4280        */
4281       public Builder setColumnQualifier(com.google.protobuf.ByteString value) {
4282         if (value == null) {
4283     throw new NullPointerException();
4284   }
4285   bitField0_ |= 0x00000004;
4286         columnQualifier_ = value;
4287         onChanged();
4288         return this;
4289       }
4290       /**
4291        * <code>optional bytes column_qualifier = 3;</code>
4292        */
4293       public Builder clearColumnQualifier() {
4294         bitField0_ = (bitField0_ & ~0x00000004);
4295         columnQualifier_ = getDefaultInstance().getColumnQualifier();
4296         onChanged();
4297         return this;
4298       }
4299 
4300       // optional bool drop_dependent_column = 4;
4301       private boolean dropDependentColumn_ ;
4302       /**
4303        * <code>optional bool drop_dependent_column = 4;</code>
4304        */
4305       public boolean hasDropDependentColumn() {
4306         return ((bitField0_ & 0x00000008) == 0x00000008);
4307       }
4308       /**
4309        * <code>optional bool drop_dependent_column = 4;</code>
4310        */
4311       public boolean getDropDependentColumn() {
4312         return dropDependentColumn_;
4313       }
4314       /**
4315        * <code>optional bool drop_dependent_column = 4;</code>
4316        */
4317       public Builder setDropDependentColumn(boolean value) {
4318         bitField0_ |= 0x00000008;
4319         dropDependentColumn_ = value;
4320         onChanged();
4321         return this;
4322       }
4323       /**
4324        * <code>optional bool drop_dependent_column = 4;</code>
4325        */
4326       public Builder clearDropDependentColumn() {
4327         bitField0_ = (bitField0_ & ~0x00000008);
4328         dropDependentColumn_ = false;
4329         onChanged();
4330         return this;
4331       }
4332 
4333       // @@protoc_insertion_point(builder_scope:DependentColumnFilter)
4334     }
4335 
4336     static {
4337       defaultInstance = new DependentColumnFilter(true);
4338       defaultInstance.initFields();
4339     }
4340 
4341     // @@protoc_insertion_point(class_scope:DependentColumnFilter)
4342   }
4343 
4344   public interface FamilyFilterOrBuilder
4345       extends com.google.protobuf.MessageOrBuilder {
4346 
4347     // required .CompareFilter compare_filter = 1;
4348     /**
4349      * <code>required .CompareFilter compare_filter = 1;</code>
4350      */
4351     boolean hasCompareFilter();
4352     /**
4353      * <code>required .CompareFilter compare_filter = 1;</code>
4354      */
4355     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
4356     /**
4357      * <code>required .CompareFilter compare_filter = 1;</code>
4358      */
4359     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
4360   }
4361   /**
4362    * Protobuf type {@code FamilyFilter}
4363    */
4364   public static final class FamilyFilter extends
4365       com.google.protobuf.GeneratedMessage
4366       implements FamilyFilterOrBuilder {
4367     // Use FamilyFilter.newBuilder() to construct.
4368     private FamilyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4369       super(builder);
4370       this.unknownFields = builder.getUnknownFields();
4371     }
4372     private FamilyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4373 
4374     private static final FamilyFilter defaultInstance;
4375     public static FamilyFilter getDefaultInstance() {
4376       return defaultInstance;
4377     }
4378 
4379     public FamilyFilter getDefaultInstanceForType() {
4380       return defaultInstance;
4381     }
4382 
4383     private final com.google.protobuf.UnknownFieldSet unknownFields;
4384     @java.lang.Override
4385     public final com.google.protobuf.UnknownFieldSet
4386         getUnknownFields() {
4387       return this.unknownFields;
4388     }
4389     private FamilyFilter(
4390         com.google.protobuf.CodedInputStream input,
4391         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4392         throws com.google.protobuf.InvalidProtocolBufferException {
4393       initFields();
4394       int mutable_bitField0_ = 0;
4395       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4396           com.google.protobuf.UnknownFieldSet.newBuilder();
4397       try {
4398         boolean done = false;
4399         while (!done) {
4400           int tag = input.readTag();
4401           switch (tag) {
4402             case 0:
4403               done = true;
4404               break;
4405             default: {
4406               if (!parseUnknownField(input, unknownFields,
4407                                      extensionRegistry, tag)) {
4408                 done = true;
4409               }
4410               break;
4411             }
4412             case 10: {
4413               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
4414               if (((bitField0_ & 0x00000001) == 0x00000001)) {
4415                 subBuilder = compareFilter_.toBuilder();
4416               }
4417               compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
4418               if (subBuilder != null) {
4419                 subBuilder.mergeFrom(compareFilter_);
4420                 compareFilter_ = subBuilder.buildPartial();
4421               }
4422               bitField0_ |= 0x00000001;
4423               break;
4424             }
4425           }
4426         }
4427       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4428         throw e.setUnfinishedMessage(this);
4429       } catch (java.io.IOException e) {
4430         throw new com.google.protobuf.InvalidProtocolBufferException(
4431             e.getMessage()).setUnfinishedMessage(this);
4432       } finally {
4433         this.unknownFields = unknownFields.build();
4434         makeExtensionsImmutable();
4435       }
4436     }
4437     public static final com.google.protobuf.Descriptors.Descriptor
4438         getDescriptor() {
4439       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor;
4440     }
4441 
4442     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4443         internalGetFieldAccessorTable() {
4444       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable
4445           .ensureFieldAccessorsInitialized(
4446               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class);
4447     }
4448 
4449     public static com.google.protobuf.Parser<FamilyFilter> PARSER =
4450         new com.google.protobuf.AbstractParser<FamilyFilter>() {
4451       public FamilyFilter parsePartialFrom(
4452           com.google.protobuf.CodedInputStream input,
4453           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4454           throws com.google.protobuf.InvalidProtocolBufferException {
4455         return new FamilyFilter(input, extensionRegistry);
4456       }
4457     };
4458 
4459     @java.lang.Override
4460     public com.google.protobuf.Parser<FamilyFilter> getParserForType() {
4461       return PARSER;
4462     }
4463 
4464     private int bitField0_;
4465     // required .CompareFilter compare_filter = 1;
4466     public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
4467     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
4468     /**
4469      * <code>required .CompareFilter compare_filter = 1;</code>
4470      */
4471     public boolean hasCompareFilter() {
4472       return ((bitField0_ & 0x00000001) == 0x00000001);
4473     }
4474     /**
4475      * <code>required .CompareFilter compare_filter = 1;</code>
4476      */
4477     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
4478       return compareFilter_;
4479     }
4480     /**
4481      * <code>required .CompareFilter compare_filter = 1;</code>
4482      */
4483     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
4484       return compareFilter_;
4485     }
4486 
4487     private void initFields() {
4488       compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
4489     }
4490     private byte memoizedIsInitialized = -1;
4491     public final boolean isInitialized() {
4492       byte isInitialized = memoizedIsInitialized;
4493       if (isInitialized != -1) return isInitialized == 1;
4494 
4495       if (!hasCompareFilter()) {
4496         memoizedIsInitialized = 0;
4497         return false;
4498       }
4499       if (!getCompareFilter().isInitialized()) {
4500         memoizedIsInitialized = 0;
4501         return false;
4502       }
4503       memoizedIsInitialized = 1;
4504       return true;
4505     }
4506 
4507     public void writeTo(com.google.protobuf.CodedOutputStream output)
4508                         throws java.io.IOException {
4509       getSerializedSize();
4510       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4511         output.writeMessage(1, compareFilter_);
4512       }
4513       getUnknownFields().writeTo(output);
4514     }
4515 
4516     private int memoizedSerializedSize = -1;
4517     public int getSerializedSize() {
4518       int size = memoizedSerializedSize;
4519       if (size != -1) return size;
4520 
4521       size = 0;
4522       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4523         size += com.google.protobuf.CodedOutputStream
4524           .computeMessageSize(1, compareFilter_);
4525       }
4526       size += getUnknownFields().getSerializedSize();
4527       memoizedSerializedSize = size;
4528       return size;
4529     }
4530 
4531     private static final long serialVersionUID = 0L;
4532     @java.lang.Override
4533     protected java.lang.Object writeReplace()
4534         throws java.io.ObjectStreamException {
4535       return super.writeReplace();
4536     }
4537 
4538     @java.lang.Override
4539     public boolean equals(final java.lang.Object obj) {
4540       if (obj == this) {
4541        return true;
4542       }
4543       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)) {
4544         return super.equals(obj);
4545       }
4546       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj;
4547 
4548       boolean result = true;
4549       result = result && (hasCompareFilter() == other.hasCompareFilter());
4550       if (hasCompareFilter()) {
4551         result = result && getCompareFilter()
4552             .equals(other.getCompareFilter());
4553       }
4554       result = result &&
4555           getUnknownFields().equals(other.getUnknownFields());
4556       return result;
4557     }
4558 
4559     private int memoizedHashCode = 0;
4560     @java.lang.Override
4561     public int hashCode() {
4562       if (memoizedHashCode != 0) {
4563         return memoizedHashCode;
4564       }
4565       int hash = 41;
4566       hash = (19 * hash) + getDescriptorForType().hashCode();
4567       if (hasCompareFilter()) {
4568         hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
4569         hash = (53 * hash) + getCompareFilter().hashCode();
4570       }
4571       hash = (29 * hash) + getUnknownFields().hashCode();
4572       memoizedHashCode = hash;
4573       return hash;
4574     }
4575 
4576     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
4577         com.google.protobuf.ByteString data)
4578         throws com.google.protobuf.InvalidProtocolBufferException {
4579       return PARSER.parseFrom(data);
4580     }
4581     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
4582         com.google.protobuf.ByteString data,
4583         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4584         throws com.google.protobuf.InvalidProtocolBufferException {
4585       return PARSER.parseFrom(data, extensionRegistry);
4586     }
4587     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data)
4588         throws com.google.protobuf.InvalidProtocolBufferException {
4589       return PARSER.parseFrom(data);
4590     }
4591     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
4592         byte[] data,
4593         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4594         throws com.google.protobuf.InvalidProtocolBufferException {
4595       return PARSER.parseFrom(data, extensionRegistry);
4596     }
4597     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input)
4598         throws java.io.IOException {
4599       return PARSER.parseFrom(input);
4600     }
4601     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
4602         java.io.InputStream input,
4603         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4604         throws java.io.IOException {
4605       return PARSER.parseFrom(input, extensionRegistry);
4606     }
4607     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input)
4608         throws java.io.IOException {
4609       return PARSER.parseDelimitedFrom(input);
4610     }
4611     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(
4612         java.io.InputStream input,
4613         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4614         throws java.io.IOException {
4615       return PARSER.parseDelimitedFrom(input, extensionRegistry);
4616     }
4617     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
4618         com.google.protobuf.CodedInputStream input)
4619         throws java.io.IOException {
4620       return PARSER.parseFrom(input);
4621     }
4622     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(
4623         com.google.protobuf.CodedInputStream input,
4624         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4625         throws java.io.IOException {
4626       return PARSER.parseFrom(input, extensionRegistry);
4627     }
4628 
4629     public static Builder newBuilder() { return Builder.create(); }
4630     public Builder newBuilderForType() { return newBuilder(); }
4631     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) {
4632       return newBuilder().mergeFrom(prototype);
4633     }
4634     public Builder toBuilder() { return newBuilder(this); }
4635 
4636     @java.lang.Override
4637     protected Builder newBuilderForType(
4638         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4639       Builder builder = new Builder(parent);
4640       return builder;
4641     }
4642     /**
4643      * Protobuf type {@code FamilyFilter}
4644      */
4645     public static final class Builder extends
4646         com.google.protobuf.GeneratedMessage.Builder<Builder>
4647        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder {
4648       public static final com.google.protobuf.Descriptors.Descriptor
4649           getDescriptor() {
4650         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor;
4651       }
4652 
4653       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4654           internalGetFieldAccessorTable() {
4655         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable
4656             .ensureFieldAccessorsInitialized(
4657                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class);
4658       }
4659 
4660       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder()
4661       private Builder() {
4662         maybeForceBuilderInitialization();
4663       }
4664 
4665       private Builder(
4666           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4667         super(parent);
4668         maybeForceBuilderInitialization();
4669       }
4670       private void maybeForceBuilderInitialization() {
4671         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4672           getCompareFilterFieldBuilder();
4673         }
4674       }
4675       private static Builder create() {
4676         return new Builder();
4677       }
4678 
4679       public Builder clear() {
4680         super.clear();
4681         if (compareFilterBuilder_ == null) {
4682           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
4683         } else {
4684           compareFilterBuilder_.clear();
4685         }
4686         bitField0_ = (bitField0_ & ~0x00000001);
4687         return this;
4688       }
4689 
4690       public Builder clone() {
4691         return create().mergeFrom(buildPartial());
4692       }
4693 
4694       public com.google.protobuf.Descriptors.Descriptor
4695           getDescriptorForType() {
4696         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor;
4697       }
4698 
4699       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() {
4700         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance();
4701       }
4702 
4703       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() {
4704         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial();
4705         if (!result.isInitialized()) {
4706           throw newUninitializedMessageException(result);
4707         }
4708         return result;
4709       }
4710 
4711       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() {
4712         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this);
4713         int from_bitField0_ = bitField0_;
4714         int to_bitField0_ = 0;
4715         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4716           to_bitField0_ |= 0x00000001;
4717         }
4718         if (compareFilterBuilder_ == null) {
4719           result.compareFilter_ = compareFilter_;
4720         } else {
4721           result.compareFilter_ = compareFilterBuilder_.build();
4722         }
4723         result.bitField0_ = to_bitField0_;
4724         onBuilt();
4725         return result;
4726       }
4727 
4728       public Builder mergeFrom(com.google.protobuf.Message other) {
4729         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) {
4730           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other);
4731         } else {
4732           super.mergeFrom(other);
4733           return this;
4734         }
4735       }
4736 
4737       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) {
4738         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this;
4739         if (other.hasCompareFilter()) {
4740           mergeCompareFilter(other.getCompareFilter());
4741         }
4742         this.mergeUnknownFields(other.getUnknownFields());
4743         return this;
4744       }
4745 
4746       public final boolean isInitialized() {
4747         if (!hasCompareFilter()) {
4748 
4749           return false;
4750         }
4751         if (!getCompareFilter().isInitialized()) {
4752 
4753           return false;
4754         }
4755         return true;
4756       }
4757 
4758       public Builder mergeFrom(
4759           com.google.protobuf.CodedInputStream input,
4760           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4761           throws java.io.IOException {
4762         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parsedMessage = null;
4763         try {
4764           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4765         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4766           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) e.getUnfinishedMessage();
4767           throw e;
4768         } finally {
4769           if (parsedMessage != null) {
4770             mergeFrom(parsedMessage);
4771           }
4772         }
4773         return this;
4774       }
4775       private int bitField0_;
4776 
4777       // required .CompareFilter compare_filter = 1;
4778       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
4779       private com.google.protobuf.SingleFieldBuilder<
4780           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
4781       /**
4782        * <code>required .CompareFilter compare_filter = 1;</code>
4783        */
4784       public boolean hasCompareFilter() {
4785         return ((bitField0_ & 0x00000001) == 0x00000001);
4786       }
4787       /**
4788        * <code>required .CompareFilter compare_filter = 1;</code>
4789        */
4790       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
4791         if (compareFilterBuilder_ == null) {
4792           return compareFilter_;
4793         } else {
4794           return compareFilterBuilder_.getMessage();
4795         }
4796       }
4797       /**
4798        * <code>required .CompareFilter compare_filter = 1;</code>
4799        */
4800       public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
4801         if (compareFilterBuilder_ == null) {
4802           if (value == null) {
4803             throw new NullPointerException();
4804           }
4805           compareFilter_ = value;
4806           onChanged();
4807         } else {
4808           compareFilterBuilder_.setMessage(value);
4809         }
4810         bitField0_ |= 0x00000001;
4811         return this;
4812       }
4813       /**
4814        * <code>required .CompareFilter compare_filter = 1;</code>
4815        */
4816       public Builder setCompareFilter(
4817           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
4818         if (compareFilterBuilder_ == null) {
4819           compareFilter_ = builderForValue.build();
4820           onChanged();
4821         } else {
4822           compareFilterBuilder_.setMessage(builderForValue.build());
4823         }
4824         bitField0_ |= 0x00000001;
4825         return this;
4826       }
4827       /**
4828        * <code>required .CompareFilter compare_filter = 1;</code>
4829        */
4830       public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
4831         if (compareFilterBuilder_ == null) {
4832           if (((bitField0_ & 0x00000001) == 0x00000001) &&
4833               compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
4834             compareFilter_ =
4835               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
4836           } else {
4837             compareFilter_ = value;
4838           }
4839           onChanged();
4840         } else {
4841           compareFilterBuilder_.mergeFrom(value);
4842         }
4843         bitField0_ |= 0x00000001;
4844         return this;
4845       }
4846       /**
4847        * <code>required .CompareFilter compare_filter = 1;</code>
4848        */
4849       public Builder clearCompareFilter() {
4850         if (compareFilterBuilder_ == null) {
4851           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
4852           onChanged();
4853         } else {
4854           compareFilterBuilder_.clear();
4855         }
4856         bitField0_ = (bitField0_ & ~0x00000001);
4857         return this;
4858       }
4859       /**
4860        * <code>required .CompareFilter compare_filter = 1;</code>
4861        */
4862       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
4863         bitField0_ |= 0x00000001;
4864         onChanged();
4865         return getCompareFilterFieldBuilder().getBuilder();
4866       }
4867       /**
4868        * <code>required .CompareFilter compare_filter = 1;</code>
4869        */
4870       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
4871         if (compareFilterBuilder_ != null) {
4872           return compareFilterBuilder_.getMessageOrBuilder();
4873         } else {
4874           return compareFilter_;
4875         }
4876       }
4877       /**
4878        * <code>required .CompareFilter compare_filter = 1;</code>
4879        */
4880       private com.google.protobuf.SingleFieldBuilder<
4881           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
4882           getCompareFilterFieldBuilder() {
4883         if (compareFilterBuilder_ == null) {
4884           compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
4885               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
4886                   compareFilter_,
4887                   getParentForChildren(),
4888                   isClean());
4889           compareFilter_ = null;
4890         }
4891         return compareFilterBuilder_;
4892       }
4893 
4894       // @@protoc_insertion_point(builder_scope:FamilyFilter)
4895     }
4896 
4897     static {
4898       defaultInstance = new FamilyFilter(true);
4899       defaultInstance.initFields();
4900     }
4901 
4902     // @@protoc_insertion_point(class_scope:FamilyFilter)
4903   }
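  // FamilyFilter wraps a single required compare_filter field, so isInitialized() only
  // returns true once a CompareFilter has been supplied via the Builder. A minimal sketch,
  // again assuming `compareFilter` is a fully initialized CompareFilter built elsewhere:
  //
  //   FamilyFilter familyFilter =
  //       FamilyFilter.newBuilder().setCompareFilter(compareFilter).build();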
4904 
4905   public interface FilterListOrBuilder
4906       extends com.google.protobuf.MessageOrBuilder {
4907 
4908     // required .FilterList.Operator operator = 1;
4909     /**
4910      * <code>required .FilterList.Operator operator = 1;</code>
4911      */
4912     boolean hasOperator();
4913     /**
4914      * <code>required .FilterList.Operator operator = 1;</code>
4915      */
4916     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator();
4917 
4918     // repeated .Filter filters = 2;
4919     /**
4920      * <code>repeated .Filter filters = 2;</code>
4921      */
4922     java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>
4923         getFiltersList();
4924     /**
4925      * <code>repeated .Filter filters = 2;</code>
4926      */
4927     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index);
4928     /**
4929      * <code>repeated .Filter filters = 2;</code>
4930      */
4931     int getFiltersCount();
4932     /**
4933      * <code>repeated .Filter filters = 2;</code>
4934      */
4935     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
4936         getFiltersOrBuilderList();
4937     /**
4938      * <code>repeated .Filter filters = 2;</code>
4939      */
4940     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
4941         int index);
4942   }
4943   /**
4944    * Protobuf type {@code FilterList}
4945    */
4946   public static final class FilterList extends
4947       com.google.protobuf.GeneratedMessage
4948       implements FilterListOrBuilder {
4949     // Use FilterList.newBuilder() to construct.
4950     private FilterList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4951       super(builder);
4952       this.unknownFields = builder.getUnknownFields();
4953     }
4954     private FilterList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4955 
4956     private static final FilterList defaultInstance;
4957     public static FilterList getDefaultInstance() {
4958       return defaultInstance;
4959     }
4960 
4961     public FilterList getDefaultInstanceForType() {
4962       return defaultInstance;
4963     }
4964 
4965     private final com.google.protobuf.UnknownFieldSet unknownFields;
4966     @java.lang.Override
4967     public final com.google.protobuf.UnknownFieldSet
4968         getUnknownFields() {
4969       return this.unknownFields;
4970     }
4971     private FilterList(
4972         com.google.protobuf.CodedInputStream input,
4973         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4974         throws com.google.protobuf.InvalidProtocolBufferException {
4975       initFields();
4976       int mutable_bitField0_ = 0;
4977       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4978           com.google.protobuf.UnknownFieldSet.newBuilder();
4979       try {
4980         boolean done = false;
4981         while (!done) {
4982           int tag = input.readTag();
4983           switch (tag) {
4984             case 0:
4985               done = true;
4986               break;
4987             default: {
4988               if (!parseUnknownField(input, unknownFields,
4989                                      extensionRegistry, tag)) {
4990                 done = true;
4991               }
4992               break;
4993             }
4994             case 8: {
4995               int rawValue = input.readEnum();
4996               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue);
4997               if (value == null) {
4998                 unknownFields.mergeVarintField(1, rawValue);
4999               } else {
5000                 bitField0_ |= 0x00000001;
5001                 operator_ = value;
5002               }
5003               break;
5004             }
5005             case 18: {
5006               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
5007                 filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>();
5008                 mutable_bitField0_ |= 0x00000002;
5009               }
5010               filters_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry));
5011               break;
5012             }
5013           }
5014         }
5015       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5016         throw e.setUnfinishedMessage(this);
5017       } catch (java.io.IOException e) {
5018         throw new com.google.protobuf.InvalidProtocolBufferException(
5019             e.getMessage()).setUnfinishedMessage(this);
5020       } finally {
5021         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
5022           filters_ = java.util.Collections.unmodifiableList(filters_);
5023         }
5024         this.unknownFields = unknownFields.build();
5025         makeExtensionsImmutable();
5026       }
5027     }
5028     public static final com.google.protobuf.Descriptors.Descriptor
5029         getDescriptor() {
5030       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor;
5031     }
5032 
5033     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5034         internalGetFieldAccessorTable() {
5035       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable
5036           .ensureFieldAccessorsInitialized(
5037               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class);
5038     }
5039 
5040     public static com.google.protobuf.Parser<FilterList> PARSER =
5041         new com.google.protobuf.AbstractParser<FilterList>() {
5042       public FilterList parsePartialFrom(
5043           com.google.protobuf.CodedInputStream input,
5044           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5045           throws com.google.protobuf.InvalidProtocolBufferException {
5046         return new FilterList(input, extensionRegistry);
5047       }
5048     };
5049 
5050     @java.lang.Override
5051     public com.google.protobuf.Parser<FilterList> getParserForType() {
5052       return PARSER;
5053     }
5054 
5055     /**
5056      * Protobuf enum {@code FilterList.Operator}
5057      */
5058     public enum Operator
5059         implements com.google.protobuf.ProtocolMessageEnum {
5060       /**
5061        * <code>MUST_PASS_ALL = 1;</code>
5062        */
5063       MUST_PASS_ALL(0, 1),
5064       /**
5065        * <code>MUST_PASS_ONE = 2;</code>
5066        */
5067       MUST_PASS_ONE(1, 2),
5068       ;
5069 
5070       /**
5071        * <code>MUST_PASS_ALL = 1;</code>
5072        */
5073       public static final int MUST_PASS_ALL_VALUE = 1;
5074       /**
5075        * <code>MUST_PASS_ONE = 2;</code>
5076        */
5077       public static final int MUST_PASS_ONE_VALUE = 2;
5078 
5079 
5080       public final int getNumber() { return value; }
5081 
5082       public static Operator valueOf(int value) {
5083         switch (value) {
5084           case 1: return MUST_PASS_ALL;
5085           case 2: return MUST_PASS_ONE;
5086           default: return null;
5087         }
5088       }
5089 
5090       public static com.google.protobuf.Internal.EnumLiteMap<Operator>
5091           internalGetValueMap() {
5092         return internalValueMap;
5093       }
5094       private static com.google.protobuf.Internal.EnumLiteMap<Operator>
5095           internalValueMap =
5096             new com.google.protobuf.Internal.EnumLiteMap<Operator>() {
5097               public Operator findValueByNumber(int number) {
5098                 return Operator.valueOf(number);
5099               }
5100             };
5101 
5102       public final com.google.protobuf.Descriptors.EnumValueDescriptor
5103           getValueDescriptor() {
5104         return getDescriptor().getValues().get(index);
5105       }
5106       public final com.google.protobuf.Descriptors.EnumDescriptor
5107           getDescriptorForType() {
5108         return getDescriptor();
5109       }
5110       public static final com.google.protobuf.Descriptors.EnumDescriptor
5111           getDescriptor() {
5112         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0);
5113       }
5114 
5115       private static final Operator[] VALUES = values();
5116 
5117       public static Operator valueOf(
5118           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
5119         if (desc.getType() != getDescriptor()) {
5120           throw new java.lang.IllegalArgumentException(
5121             "EnumValueDescriptor is not for this type.");
5122         }
5123         return VALUES[desc.getIndex()];
5124       }
5125 
5126       private final int index;
5127       private final int value;
5128 
5129       private Operator(int index, int value) {
5130         this.index = index;
5131         this.value = value;
5132       }
5133 
5134       // @@protoc_insertion_point(enum_scope:FilterList.Operator)
5135     }
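    // Editorial note (not generated output): the wire numbers above are 1-based while the
    // enum ordinals are 0-based, so conversions should go through valueOf(int)/getNumber():
    //
    //   FilterList.Operator op = FilterList.Operator.valueOf(2);       // MUST_PASS_ONE
    //   int wireValue = FilterList.Operator.MUST_PASS_ALL.getNumber(); // 1
    //   // valueOf(int) returns null for unknown numbers, which the parser above relies on.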
5136 
5137     private int bitField0_;
5138     // required .FilterList.Operator operator = 1;
5139     public static final int OPERATOR_FIELD_NUMBER = 1;
5140     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_;
5141     /**
5142      * <code>required .FilterList.Operator operator = 1;</code>
5143      */
5144     public boolean hasOperator() {
5145       return ((bitField0_ & 0x00000001) == 0x00000001);
5146     }
5147     /**
5148      * <code>required .FilterList.Operator operator = 1;</code>
5149      */
5150     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() {
5151       return operator_;
5152     }
5153 
5154     // repeated .Filter filters = 2;
5155     public static final int FILTERS_FIELD_NUMBER = 2;
5156     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_;
5157     /**
5158      * <code>repeated .Filter filters = 2;</code>
5159      */
5160     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() {
5161       return filters_;
5162     }
5163     /**
5164      * <code>repeated .Filter filters = 2;</code>
5165      */
5166     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
5167         getFiltersOrBuilderList() {
5168       return filters_;
5169     }
5170     /**
5171      * <code>repeated .Filter filters = 2;</code>
5172      */
5173     public int getFiltersCount() {
5174       return filters_.size();
5175     }
5176     /**
5177      * <code>repeated .Filter filters = 2;</code>
5178      */
5179     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) {
5180       return filters_.get(index);
5181     }
5182     /**
5183      * <code>repeated .Filter filters = 2;</code>
5184      */
5185     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
5186         int index) {
5187       return filters_.get(index);
5188     }
5189 
5190     private void initFields() {
5191       operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
5192       filters_ = java.util.Collections.emptyList();
5193     }
5194     private byte memoizedIsInitialized = -1;
5195     public final boolean isInitialized() {
5196       byte isInitialized = memoizedIsInitialized;
5197       if (isInitialized != -1) return isInitialized == 1;
5198 
5199       if (!hasOperator()) {
5200         memoizedIsInitialized = 0;
5201         return false;
5202       }
5203       for (int i = 0; i < getFiltersCount(); i++) {
5204         if (!getFilters(i).isInitialized()) {
5205           memoizedIsInitialized = 0;
5206           return false;
5207         }
5208       }
5209       memoizedIsInitialized = 1;
5210       return true;
5211     }
5212 
5213     public void writeTo(com.google.protobuf.CodedOutputStream output)
5214                         throws java.io.IOException {
5215       getSerializedSize();
5216       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5217         output.writeEnum(1, operator_.getNumber());
5218       }
5219       for (int i = 0; i < filters_.size(); i++) {
5220         output.writeMessage(2, filters_.get(i));
5221       }
5222       getUnknownFields().writeTo(output);
5223     }
5224 
5225     private int memoizedSerializedSize = -1;
5226     public int getSerializedSize() {
5227       int size = memoizedSerializedSize;
5228       if (size != -1) return size;
5229 
5230       size = 0;
5231       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5232         size += com.google.protobuf.CodedOutputStream
5233           .computeEnumSize(1, operator_.getNumber());
5234       }
5235       for (int i = 0; i < filters_.size(); i++) {
5236         size += com.google.protobuf.CodedOutputStream
5237           .computeMessageSize(2, filters_.get(i));
5238       }
5239       size += getUnknownFields().getSerializedSize();
5240       memoizedSerializedSize = size;
5241       return size;
5242     }
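    // Editorial note (not generated output): writeTo() and getSerializedSize() pair with the
    // static parseFrom() overloads further below. A round-trip sketch through a byte array,
    // assuming the toByteArray() helper inherited from the protobuf runtime:
    //
    //   byte[] bytes = filterList.toByteArray();          // uses getSerializedSize() + writeTo()
    //   FilterList copy = FilterList.parseFrom(bytes);    // see parseFrom(byte[]) below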
5243 
5244     private static final long serialVersionUID = 0L;
5245     @java.lang.Override
5246     protected java.lang.Object writeReplace()
5247         throws java.io.ObjectStreamException {
5248       return super.writeReplace();
5249     }
5250 
5251     @java.lang.Override
5252     public boolean equals(final java.lang.Object obj) {
5253       if (obj == this) {
5254        return true;
5255       }
5256       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)) {
5257         return super.equals(obj);
5258       }
5259       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj;
5260 
5261       boolean result = true;
5262       result = result && (hasOperator() == other.hasOperator());
5263       if (hasOperator()) {
5264         result = result &&
5265             (getOperator() == other.getOperator());
5266       }
5267       result = result && getFiltersList()
5268           .equals(other.getFiltersList());
5269       result = result &&
5270           getUnknownFields().equals(other.getUnknownFields());
5271       return result;
5272     }
5273 
5274     private int memoizedHashCode = 0;
5275     @java.lang.Override
5276     public int hashCode() {
5277       if (memoizedHashCode != 0) {
5278         return memoizedHashCode;
5279       }
5280       int hash = 41;
5281       hash = (19 * hash) + getDescriptorForType().hashCode();
5282       if (hasOperator()) {
5283         hash = (37 * hash) + OPERATOR_FIELD_NUMBER;
5284         hash = (53 * hash) + hashEnum(getOperator());
5285       }
5286       if (getFiltersCount() > 0) {
5287         hash = (37 * hash) + FILTERS_FIELD_NUMBER;
5288         hash = (53 * hash) + getFiltersList().hashCode();
5289       }
5290       hash = (29 * hash) + getUnknownFields().hashCode();
5291       memoizedHashCode = hash;
5292       return hash;
5293     }
5294 
5295     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
5296         com.google.protobuf.ByteString data)
5297         throws com.google.protobuf.InvalidProtocolBufferException {
5298       return PARSER.parseFrom(data);
5299     }
5300     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
5301         com.google.protobuf.ByteString data,
5302         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5303         throws com.google.protobuf.InvalidProtocolBufferException {
5304       return PARSER.parseFrom(data, extensionRegistry);
5305     }
5306     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data)
5307         throws com.google.protobuf.InvalidProtocolBufferException {
5308       return PARSER.parseFrom(data);
5309     }
5310     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
5311         byte[] data,
5312         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5313         throws com.google.protobuf.InvalidProtocolBufferException {
5314       return PARSER.parseFrom(data, extensionRegistry);
5315     }
5316     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input)
5317         throws java.io.IOException {
5318       return PARSER.parseFrom(input);
5319     }
5320     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
5321         java.io.InputStream input,
5322         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5323         throws java.io.IOException {
5324       return PARSER.parseFrom(input, extensionRegistry);
5325     }
5326     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input)
5327         throws java.io.IOException {
5328       return PARSER.parseDelimitedFrom(input);
5329     }
5330     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(
5331         java.io.InputStream input,
5332         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5333         throws java.io.IOException {
5334       return PARSER.parseDelimitedFrom(input, extensionRegistry);
5335     }
5336     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
5337         com.google.protobuf.CodedInputStream input)
5338         throws java.io.IOException {
5339       return PARSER.parseFrom(input);
5340     }
5341     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(
5342         com.google.protobuf.CodedInputStream input,
5343         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5344         throws java.io.IOException {
5345       return PARSER.parseFrom(input, extensionRegistry);
5346     }
5347 
5348     public static Builder newBuilder() { return Builder.create(); }
5349     public Builder newBuilderForType() { return newBuilder(); }
5350     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) {
5351       return newBuilder().mergeFrom(prototype);
5352     }
5353     public Builder toBuilder() { return newBuilder(this); }
5354 
5355     @java.lang.Override
5356     protected Builder newBuilderForType(
5357         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5358       Builder builder = new Builder(parent);
5359       return builder;
5360     }
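    // Editorial note (not generated output): a sketch of assembling a FilterList through the
    // Builder defined below. Filter.newBuilder().setName(...) is assumed from the generated
    // Filter message; operator is a required field, so setOperator() must be called before
    // build() or the uninitialized-message check will fail.
    //
    //   FilterList list = FilterList.newBuilder()
    //       .setOperator(FilterList.Operator.MUST_PASS_ONE)
    //       .addFilters(Filter.newBuilder()
    //           .setName("org.apache.hadoop.hbase.filter.PrefixFilter")
    //           .build())
    //       .build();   // build() enforces that the required operator field has been set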
5361     /**
5362      * Protobuf type {@code FilterList}
5363      */
5364     public static final class Builder extends
5365         com.google.protobuf.GeneratedMessage.Builder<Builder>
5366        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder {
5367       public static final com.google.protobuf.Descriptors.Descriptor
5368           getDescriptor() {
5369         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor;
5370       }
5371 
5372       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5373           internalGetFieldAccessorTable() {
5374         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable
5375             .ensureFieldAccessorsInitialized(
5376                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class);
5377       }
5378 
5379       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder()
5380       private Builder() {
5381         maybeForceBuilderInitialization();
5382       }
5383 
5384       private Builder(
5385           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5386         super(parent);
5387         maybeForceBuilderInitialization();
5388       }
5389       private void maybeForceBuilderInitialization() {
5390         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
5391           getFiltersFieldBuilder();
5392         }
5393       }
5394       private static Builder create() {
5395         return new Builder();
5396       }
5397 
5398       public Builder clear() {
5399         super.clear();
5400         operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
5401         bitField0_ = (bitField0_ & ~0x00000001);
5402         if (filtersBuilder_ == null) {
5403           filters_ = java.util.Collections.emptyList();
5404           bitField0_ = (bitField0_ & ~0x00000002);
5405         } else {
5406           filtersBuilder_.clear();
5407         }
5408         return this;
5409       }
5410 
5411       public Builder clone() {
5412         return create().mergeFrom(buildPartial());
5413       }
5414 
5415       public com.google.protobuf.Descriptors.Descriptor
5416           getDescriptorForType() {
5417         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor;
5418       }
5419 
5420       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() {
5421         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance();
5422       }
5423 
5424       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() {
5425         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial();
5426         if (!result.isInitialized()) {
5427           throw newUninitializedMessageException(result);
5428         }
5429         return result;
5430       }
5431 
5432       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() {
5433         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this);
5434         int from_bitField0_ = bitField0_;
5435         int to_bitField0_ = 0;
5436         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
5437           to_bitField0_ |= 0x00000001;
5438         }
5439         result.operator_ = operator_;
5440         if (filtersBuilder_ == null) {
5441           if (((bitField0_ & 0x00000002) == 0x00000002)) {
5442             filters_ = java.util.Collections.unmodifiableList(filters_);
5443             bitField0_ = (bitField0_ & ~0x00000002);
5444           }
5445           result.filters_ = filters_;
5446         } else {
5447           result.filters_ = filtersBuilder_.build();
5448         }
5449         result.bitField0_ = to_bitField0_;
5450         onBuilt();
5451         return result;
5452       }
5453 
5454       public Builder mergeFrom(com.google.protobuf.Message other) {
5455         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) {
5456           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other);
5457         } else {
5458           super.mergeFrom(other);
5459           return this;
5460         }
5461       }
5462 
5463       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) {
5464         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this;
5465         if (other.hasOperator()) {
5466           setOperator(other.getOperator());
5467         }
5468         if (filtersBuilder_ == null) {
5469           if (!other.filters_.isEmpty()) {
5470             if (filters_.isEmpty()) {
5471               filters_ = other.filters_;
5472               bitField0_ = (bitField0_ & ~0x00000002);
5473             } else {
5474               ensureFiltersIsMutable();
5475               filters_.addAll(other.filters_);
5476             }
5477             onChanged();
5478           }
5479         } else {
5480           if (!other.filters_.isEmpty()) {
5481             if (filtersBuilder_.isEmpty()) {
5482               filtersBuilder_.dispose();
5483               filtersBuilder_ = null;
5484               filters_ = other.filters_;
5485               bitField0_ = (bitField0_ & ~0x00000002);
5486               filtersBuilder_ =
5487                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
5488                    getFiltersFieldBuilder() : null;
5489             } else {
5490               filtersBuilder_.addAllMessages(other.filters_);
5491             }
5492           }
5493         }
5494         this.mergeUnknownFields(other.getUnknownFields());
5495         return this;
5496       }
5497 
5498       public final boolean isInitialized() {
5499         if (!hasOperator()) {
5500 
5501           return false;
5502         }
5503         for (int i = 0; i < getFiltersCount(); i++) {
5504           if (!getFilters(i).isInitialized()) {
5505 
5506             return false;
5507           }
5508         }
5509         return true;
5510       }
5511 
5512       public Builder mergeFrom(
5513           com.google.protobuf.CodedInputStream input,
5514           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5515           throws java.io.IOException {
5516         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parsedMessage = null;
5517         try {
5518           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5519         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5520           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) e.getUnfinishedMessage();
5521           throw e;
5522         } finally {
5523           if (parsedMessage != null) {
5524             mergeFrom(parsedMessage);
5525           }
5526         }
5527         return this;
5528       }
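      // Editorial note (not generated output): mergeFrom(CodedInputStream, ...) above keeps
      // whatever was parsed before an InvalidProtocolBufferException, because the partially
      // parsed message is merged in the finally block. A caller that wants the partial data
      // can therefore catch the exception and still inspect this builder:
      //
      //   try {
      //     builder.mergeFrom(input, extensionRegistry);
      //   } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      //     // builder already contains the fields that were read successfully
      //   }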
5529       private int bitField0_;
5530 
5531       // required .FilterList.Operator operator = 1;
5532       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
5533       /**
5534        * <code>required .FilterList.Operator operator = 1;</code>
5535        */
5536       public boolean hasOperator() {
5537         return ((bitField0_ & 0x00000001) == 0x00000001);
5538       }
5539       /**
5540        * <code>required .FilterList.Operator operator = 1;</code>
5541        */
5542       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() {
5543         return operator_;
5544       }
5545       /**
5546        * <code>required .FilterList.Operator operator = 1;</code>
5547        */
5548       public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) {
5549         if (value == null) {
5550           throw new NullPointerException();
5551         }
5552         bitField0_ |= 0x00000001;
5553         operator_ = value;
5554         onChanged();
5555         return this;
5556       }
5557       /**
5558        * <code>required .FilterList.Operator operator = 1;</code>
5559        */
5560       public Builder clearOperator() {
5561         bitField0_ = (bitField0_ & ~0x00000001);
5562         operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL;
5563         onChanged();
5564         return this;
5565       }
5566 
5567       // repeated .Filter filters = 2;
5568       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_ =
5569         java.util.Collections.emptyList();
5570       private void ensureFiltersIsMutable() {
5571         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
5572           filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>(filters_);
5573           bitField0_ |= 0x00000002;
5574          }
5575       }
5576 
5577       private com.google.protobuf.RepeatedFieldBuilder<
5578           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filtersBuilder_;
5579 
5580       /**
5581        * <code>repeated .Filter filters = 2;</code>
5582        */
5583       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() {
5584         if (filtersBuilder_ == null) {
5585           return java.util.Collections.unmodifiableList(filters_);
5586         } else {
5587           return filtersBuilder_.getMessageList();
5588         }
5589       }
5590       /**
5591        * <code>repeated .Filter filters = 2;</code>
5592        */
5593       public int getFiltersCount() {
5594         if (filtersBuilder_ == null) {
5595           return filters_.size();
5596         } else {
5597           return filtersBuilder_.getCount();
5598         }
5599       }
5600       /**
5601        * <code>repeated .Filter filters = 2;</code>
5602        */
5603       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) {
5604         if (filtersBuilder_ == null) {
5605           return filters_.get(index);
5606         } else {
5607           return filtersBuilder_.getMessage(index);
5608         }
5609       }
5610       /**
5611        * <code>repeated .Filter filters = 2;</code>
5612        */
5613       public Builder setFilters(
5614           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
5615         if (filtersBuilder_ == null) {
5616           if (value == null) {
5617             throw new NullPointerException();
5618           }
5619           ensureFiltersIsMutable();
5620           filters_.set(index, value);
5621           onChanged();
5622         } else {
5623           filtersBuilder_.setMessage(index, value);
5624         }
5625         return this;
5626       }
5627       /**
5628        * <code>repeated .Filter filters = 2;</code>
5629        */
5630       public Builder setFilters(
5631           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
5632         if (filtersBuilder_ == null) {
5633           ensureFiltersIsMutable();
5634           filters_.set(index, builderForValue.build());
5635           onChanged();
5636         } else {
5637           filtersBuilder_.setMessage(index, builderForValue.build());
5638         }
5639         return this;
5640       }
5641       /**
5642        * <code>repeated .Filter filters = 2;</code>
5643        */
5644       public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
5645         if (filtersBuilder_ == null) {
5646           if (value == null) {
5647             throw new NullPointerException();
5648           }
5649           ensureFiltersIsMutable();
5650           filters_.add(value);
5651           onChanged();
5652         } else {
5653           filtersBuilder_.addMessage(value);
5654         }
5655         return this;
5656       }
5657       /**
5658        * <code>repeated .Filter filters = 2;</code>
5659        */
5660       public Builder addFilters(
5661           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
5662         if (filtersBuilder_ == null) {
5663           if (value == null) {
5664             throw new NullPointerException();
5665           }
5666           ensureFiltersIsMutable();
5667           filters_.add(index, value);
5668           onChanged();
5669         } else {
5670           filtersBuilder_.addMessage(index, value);
5671         }
5672         return this;
5673       }
5674       /**
5675        * <code>repeated .Filter filters = 2;</code>
5676        */
5677       public Builder addFilters(
5678           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
5679         if (filtersBuilder_ == null) {
5680           ensureFiltersIsMutable();
5681           filters_.add(builderForValue.build());
5682           onChanged();
5683         } else {
5684           filtersBuilder_.addMessage(builderForValue.build());
5685         }
5686         return this;
5687       }
5688       /**
5689        * <code>repeated .Filter filters = 2;</code>
5690        */
5691       public Builder addFilters(
5692           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
5693         if (filtersBuilder_ == null) {
5694           ensureFiltersIsMutable();
5695           filters_.add(index, builderForValue.build());
5696           onChanged();
5697         } else {
5698           filtersBuilder_.addMessage(index, builderForValue.build());
5699         }
5700         return this;
5701       }
5702       /**
5703        * <code>repeated .Filter filters = 2;</code>
5704        */
5705       public Builder addAllFilters(
5706           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> values) {
5707         if (filtersBuilder_ == null) {
5708           ensureFiltersIsMutable();
5709           super.addAll(values, filters_);
5710           onChanged();
5711         } else {
5712           filtersBuilder_.addAllMessages(values);
5713         }
5714         return this;
5715       }
5716       /**
5717        * <code>repeated .Filter filters = 2;</code>
5718        */
5719       public Builder clearFilters() {
5720         if (filtersBuilder_ == null) {
5721           filters_ = java.util.Collections.emptyList();
5722           bitField0_ = (bitField0_ & ~0x00000002);
5723           onChanged();
5724         } else {
5725           filtersBuilder_.clear();
5726         }
5727         return this;
5728       }
5729       /**
5730        * <code>repeated .Filter filters = 2;</code>
5731        */
5732       public Builder removeFilters(int index) {
5733         if (filtersBuilder_ == null) {
5734           ensureFiltersIsMutable();
5735           filters_.remove(index);
5736           onChanged();
5737         } else {
5738           filtersBuilder_.remove(index);
5739         }
5740         return this;
5741       }
5742       /**
5743        * <code>repeated .Filter filters = 2;</code>
5744        */
5745       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFiltersBuilder(
5746           int index) {
5747         return getFiltersFieldBuilder().getBuilder(index);
5748       }
5749       /**
5750        * <code>repeated .Filter filters = 2;</code>
5751        */
5752       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder(
5753           int index) {
5754         if (filtersBuilder_ == null) {
5755           return filters_.get(index);  } else {
5756           return filtersBuilder_.getMessageOrBuilder(index);
5757         }
5758       }
5759       /**
5760        * <code>repeated .Filter filters = 2;</code>
5761        */
5762       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
5763            getFiltersOrBuilderList() {
5764         if (filtersBuilder_ != null) {
5765           return filtersBuilder_.getMessageOrBuilderList();
5766         } else {
5767           return java.util.Collections.unmodifiableList(filters_);
5768         }
5769       }
5770       /**
5771        * <code>repeated .Filter filters = 2;</code>
5772        */
5773       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder() {
5774         return getFiltersFieldBuilder().addBuilder(
5775             org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance());
5776       }
5777       /**
5778        * <code>repeated .Filter filters = 2;</code>
5779        */
5780       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder(
5781           int index) {
5782         return getFiltersFieldBuilder().addBuilder(
5783             index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance());
5784       }
5785       /**
5786        * <code>repeated .Filter filters = 2;</code>
5787        */
5788       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder>
5789            getFiltersBuilderList() {
5790         return getFiltersFieldBuilder().getBuilderList();
5791       }
5792       private com.google.protobuf.RepeatedFieldBuilder<
5793           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
5794           getFiltersFieldBuilder() {
5795         if (filtersBuilder_ == null) {
5796           filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5797               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
5798                   filters_,
5799                   ((bitField0_ & 0x00000002) == 0x00000002),
5800                   getParentForChildren(),
5801                   isClean());
5802           filters_ = null;
5803         }
5804         return filtersBuilder_;
5805       }
5806 
5807       // @@protoc_insertion_point(builder_scope:FilterList)
5808     }
5809 
5810     static {
5811       defaultInstance = new FilterList(true);
5812       defaultInstance.initFields();
5813     }
5814 
5815     // @@protoc_insertion_point(class_scope:FilterList)
5816   }
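  // Editorial note (not generated output): in HBase, a FilterList is itself shipped as a
  // generic Filter envelope, so nesting one list inside another is expressed by serializing
  // it into the Filter message. A sketch, assuming the generated Filter.Builder setters for
  // its name and serialized_filter fields:
  //
  //   Filter asGenericFilter = Filter.newBuilder()
  //       .setName("org.apache.hadoop.hbase.filter.FilterList")
  //       .setSerializedFilter(innerList.toByteString())
  //       .build();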
5817 
5818   public interface FilterWrapperOrBuilder
5819       extends com.google.protobuf.MessageOrBuilder {
5820 
5821     // required .Filter filter = 1;
5822     /**
5823      * <code>required .Filter filter = 1;</code>
5824      */
5825     boolean hasFilter();
5826     /**
5827      * <code>required .Filter filter = 1;</code>
5828      */
5829     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
5830     /**
5831      * <code>required .Filter filter = 1;</code>
5832      */
5833     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
5834   }
5835   /**
5836    * Protobuf type {@code FilterWrapper}
5837    */
5838   public static final class FilterWrapper extends
5839       com.google.protobuf.GeneratedMessage
5840       implements FilterWrapperOrBuilder {
5841     // Use FilterWrapper.newBuilder() to construct.
5842     private FilterWrapper(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5843       super(builder);
5844       this.unknownFields = builder.getUnknownFields();
5845     }
5846     private FilterWrapper(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5847 
5848     private static final FilterWrapper defaultInstance;
5849     public static FilterWrapper getDefaultInstance() {
5850       return defaultInstance;
5851     }
5852 
5853     public FilterWrapper getDefaultInstanceForType() {
5854       return defaultInstance;
5855     }
5856 
5857     private final com.google.protobuf.UnknownFieldSet unknownFields;
5858     @java.lang.Override
5859     public final com.google.protobuf.UnknownFieldSet
5860         getUnknownFields() {
5861       return this.unknownFields;
5862     }
5863     private FilterWrapper(
5864         com.google.protobuf.CodedInputStream input,
5865         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5866         throws com.google.protobuf.InvalidProtocolBufferException {
5867       initFields();
5868       int mutable_bitField0_ = 0;
5869       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
5870           com.google.protobuf.UnknownFieldSet.newBuilder();
5871       try {
5872         boolean done = false;
5873         while (!done) {
5874           int tag = input.readTag();
5875           switch (tag) {
5876             case 0:
5877               done = true;
5878               break;
5879             default: {
5880               if (!parseUnknownField(input, unknownFields,
5881                                      extensionRegistry, tag)) {
5882                 done = true;
5883               }
5884               break;
5885             }
5886             case 10: {
5887               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
5888               if (((bitField0_ & 0x00000001) == 0x00000001)) {
5889                 subBuilder = filter_.toBuilder();
5890               }
5891               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
5892               if (subBuilder != null) {
5893                 subBuilder.mergeFrom(filter_);
5894                 filter_ = subBuilder.buildPartial();
5895               }
5896               bitField0_ |= 0x00000001;
5897               break;
5898             }
5899           }
5900         }
5901       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5902         throw e.setUnfinishedMessage(this);
5903       } catch (java.io.IOException e) {
5904         throw new com.google.protobuf.InvalidProtocolBufferException(
5905             e.getMessage()).setUnfinishedMessage(this);
5906       } finally {
5907         this.unknownFields = unknownFields.build();
5908         makeExtensionsImmutable();
5909       }
5910     }
5911     public static final com.google.protobuf.Descriptors.Descriptor
5912         getDescriptor() {
5913       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor;
5914     }
5915 
5916     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5917         internalGetFieldAccessorTable() {
5918       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable
5919           .ensureFieldAccessorsInitialized(
5920               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class);
5921     }
5922 
5923     public static com.google.protobuf.Parser<FilterWrapper> PARSER =
5924         new com.google.protobuf.AbstractParser<FilterWrapper>() {
5925       public FilterWrapper parsePartialFrom(
5926           com.google.protobuf.CodedInputStream input,
5927           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5928           throws com.google.protobuf.InvalidProtocolBufferException {
5929         return new FilterWrapper(input, extensionRegistry);
5930       }
5931     };
5932 
5933     @java.lang.Override
5934     public com.google.protobuf.Parser<FilterWrapper> getParserForType() {
5935       return PARSER;
5936     }
5937 
5938     private int bitField0_;
5939     // required .Filter filter = 1;
5940     public static final int FILTER_FIELD_NUMBER = 1;
5941     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
5942     /**
5943      * <code>required .Filter filter = 1;</code>
5944      */
5945     public boolean hasFilter() {
5946       return ((bitField0_ & 0x00000001) == 0x00000001);
5947     }
5948     /**
5949      * <code>required .Filter filter = 1;</code>
5950      */
5951     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
5952       return filter_;
5953     }
5954     /**
5955      * <code>required .Filter filter = 1;</code>
5956      */
5957     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
5958       return filter_;
5959     }
5960 
5961     private void initFields() {
5962       filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
5963     }
5964     private byte memoizedIsInitialized = -1;
5965     public final boolean isInitialized() {
5966       byte isInitialized = memoizedIsInitialized;
5967       if (isInitialized != -1) return isInitialized == 1;
5968 
5969       if (!hasFilter()) {
5970         memoizedIsInitialized = 0;
5971         return false;
5972       }
5973       if (!getFilter().isInitialized()) {
5974         memoizedIsInitialized = 0;
5975         return false;
5976       }
5977       memoizedIsInitialized = 1;
5978       return true;
5979     }
5980 
5981     public void writeTo(com.google.protobuf.CodedOutputStream output)
5982                         throws java.io.IOException {
5983       getSerializedSize();
5984       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5985         output.writeMessage(1, filter_);
5986       }
5987       getUnknownFields().writeTo(output);
5988     }
5989 
5990     private int memoizedSerializedSize = -1;
5991     public int getSerializedSize() {
5992       int size = memoizedSerializedSize;
5993       if (size != -1) return size;
5994 
5995       size = 0;
5996       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5997         size += com.google.protobuf.CodedOutputStream
5998           .computeMessageSize(1, filter_);
5999       }
6000       size += getUnknownFields().getSerializedSize();
6001       memoizedSerializedSize = size;
6002       return size;
6003     }
6004 
6005     private static final long serialVersionUID = 0L;
6006     @java.lang.Override
6007     protected java.lang.Object writeReplace()
6008         throws java.io.ObjectStreamException {
6009       return super.writeReplace();
6010     }
6011 
6012     @java.lang.Override
6013     public boolean equals(final java.lang.Object obj) {
6014       if (obj == this) {
6015        return true;
6016       }
6017       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)) {
6018         return super.equals(obj);
6019       }
6020       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj;
6021 
6022       boolean result = true;
6023       result = result && (hasFilter() == other.hasFilter());
6024       if (hasFilter()) {
6025         result = result && getFilter()
6026             .equals(other.getFilter());
6027       }
6028       result = result &&
6029           getUnknownFields().equals(other.getUnknownFields());
6030       return result;
6031     }
6032 
6033     private int memoizedHashCode = 0;
6034     @java.lang.Override
6035     public int hashCode() {
6036       if (memoizedHashCode != 0) {
6037         return memoizedHashCode;
6038       }
6039       int hash = 41;
6040       hash = (19 * hash) + getDescriptorForType().hashCode();
6041       if (hasFilter()) {
6042         hash = (37 * hash) + FILTER_FIELD_NUMBER;
6043         hash = (53 * hash) + getFilter().hashCode();
6044       }
6045       hash = (29 * hash) + getUnknownFields().hashCode();
6046       memoizedHashCode = hash;
6047       return hash;
6048     }
6049 
6050     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
6051         com.google.protobuf.ByteString data)
6052         throws com.google.protobuf.InvalidProtocolBufferException {
6053       return PARSER.parseFrom(data);
6054     }
6055     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
6056         com.google.protobuf.ByteString data,
6057         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6058         throws com.google.protobuf.InvalidProtocolBufferException {
6059       return PARSER.parseFrom(data, extensionRegistry);
6060     }
6061     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data)
6062         throws com.google.protobuf.InvalidProtocolBufferException {
6063       return PARSER.parseFrom(data);
6064     }
6065     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
6066         byte[] data,
6067         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6068         throws com.google.protobuf.InvalidProtocolBufferException {
6069       return PARSER.parseFrom(data, extensionRegistry);
6070     }
6071     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input)
6072         throws java.io.IOException {
6073       return PARSER.parseFrom(input);
6074     }
6075     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
6076         java.io.InputStream input,
6077         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6078         throws java.io.IOException {
6079       return PARSER.parseFrom(input, extensionRegistry);
6080     }
6081     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input)
6082         throws java.io.IOException {
6083       return PARSER.parseDelimitedFrom(input);
6084     }
6085     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(
6086         java.io.InputStream input,
6087         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6088         throws java.io.IOException {
6089       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6090     }
6091     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
6092         com.google.protobuf.CodedInputStream input)
6093         throws java.io.IOException {
6094       return PARSER.parseFrom(input);
6095     }
6096     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(
6097         com.google.protobuf.CodedInputStream input,
6098         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6099         throws java.io.IOException {
6100       return PARSER.parseFrom(input, extensionRegistry);
6101     }
6102 
6103     public static Builder newBuilder() { return Builder.create(); }
6104     public Builder newBuilderForType() { return newBuilder(); }
6105     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) {
6106       return newBuilder().mergeFrom(prototype);
6107     }
6108     public Builder toBuilder() { return newBuilder(this); }
6109 
6110     @java.lang.Override
6111     protected Builder newBuilderForType(
6112         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6113       Builder builder = new Builder(parent);
6114       return builder;
6115     }
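    // Editorial note (not generated output): a sketch of wrapping an existing Filter message,
    // assuming the generated setFilter(Filter) setter on the Builder defined below:
    //
    //   FilterWrapper wrapper = FilterWrapper.newBuilder()
    //       .setFilter(existingFilter)
    //       .build();   // build() requires an initialized filter field, per isInitialized() above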
6116     /**
6117      * Protobuf type {@code FilterWrapper}
6118      */
6119     public static final class Builder extends
6120         com.google.protobuf.GeneratedMessage.Builder<Builder>
6121        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder {
6122       public static final com.google.protobuf.Descriptors.Descriptor
6123           getDescriptor() {
6124         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor;
6125       }
6126 
6127       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6128           internalGetFieldAccessorTable() {
6129         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable
6130             .ensureFieldAccessorsInitialized(
6131                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class);
6132       }
6133 
6134       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder()
Builder()6135       private Builder() {
6136         maybeForceBuilderInitialization();
6137       }
6138 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6139       private Builder(
6140           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6141         super(parent);
6142         maybeForceBuilderInitialization();
6143       }
maybeForceBuilderInitialization()6144       private void maybeForceBuilderInitialization() {
6145         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6146           getFilterFieldBuilder();
6147         }
6148       }
create()6149       private static Builder create() {
6150         return new Builder();
6151       }
6152 
clear()6153       public Builder clear() {
6154         super.clear();
6155         if (filterBuilder_ == null) {
6156           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
6157         } else {
6158           filterBuilder_.clear();
6159         }
6160         bitField0_ = (bitField0_ & ~0x00000001);
6161         return this;
6162       }
6163 
clone()6164       public Builder clone() {
6165         return create().mergeFrom(buildPartial());
6166       }
6167 
6168       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()6169           getDescriptorForType() {
6170         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor;
6171       }
6172 
getDefaultInstanceForType()6173       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() {
6174         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance();
6175       }
6176 
build()6177       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() {
6178         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial();
6179         if (!result.isInitialized()) {
6180           throw newUninitializedMessageException(result);
6181         }
6182         return result;
6183       }
6184 
buildPartial()6185       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() {
6186         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this);
6187         int from_bitField0_ = bitField0_;
6188         int to_bitField0_ = 0;
6189         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6190           to_bitField0_ |= 0x00000001;
6191         }
6192         if (filterBuilder_ == null) {
6193           result.filter_ = filter_;
6194         } else {
6195           result.filter_ = filterBuilder_.build();
6196         }
6197         result.bitField0_ = to_bitField0_;
6198         onBuilt();
6199         return result;
6200       }
6201 
mergeFrom(com.google.protobuf.Message other)6202       public Builder mergeFrom(com.google.protobuf.Message other) {
6203         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) {
6204           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other);
6205         } else {
6206           super.mergeFrom(other);
6207           return this;
6208         }
6209       }
6210 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other)6211       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) {
6212         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this;
6213         if (other.hasFilter()) {
6214           mergeFilter(other.getFilter());
6215         }
6216         this.mergeUnknownFields(other.getUnknownFields());
6217         return this;
6218       }
6219 
isInitialized()6220       public final boolean isInitialized() {
6221         if (!hasFilter()) {
6222 
6223           return false;
6224         }
6225         if (!getFilter().isInitialized()) {
6226 
6227           return false;
6228         }
6229         return true;
6230       }
6231 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6232       public Builder mergeFrom(
6233           com.google.protobuf.CodedInputStream input,
6234           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6235           throws java.io.IOException {
6236         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parsedMessage = null;
6237         try {
6238           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6239         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6240           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) e.getUnfinishedMessage();
6241           throw e;
6242         } finally {
6243           if (parsedMessage != null) {
6244             mergeFrom(parsedMessage);
6245           }
6246         }
6247         return this;
6248       }
6249       private int bitField0_;
6250 
6251       // required .Filter filter = 1;
6252       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
6253       private com.google.protobuf.SingleFieldBuilder<
6254           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
6255       /**
6256        * <code>required .Filter filter = 1;</code>
6257        */
hasFilter()6258       public boolean hasFilter() {
6259         return ((bitField0_ & 0x00000001) == 0x00000001);
6260       }
6261       /**
6262        * <code>required .Filter filter = 1;</code>
6263        */
getFilter()6264       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
6265         if (filterBuilder_ == null) {
6266           return filter_;
6267         } else {
6268           return filterBuilder_.getMessage();
6269         }
6270       }
6271       /**
6272        * <code>required .Filter filter = 1;</code>
6273        */
setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)6274       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
6275         if (filterBuilder_ == null) {
6276           if (value == null) {
6277             throw new NullPointerException();
6278           }
6279           filter_ = value;
6280           onChanged();
6281         } else {
6282           filterBuilder_.setMessage(value);
6283         }
6284         bitField0_ |= 0x00000001;
6285         return this;
6286       }
6287       /**
6288        * <code>required .Filter filter = 1;</code>
6289        */
setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)6290       public Builder setFilter(
6291           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
6292         if (filterBuilder_ == null) {
6293           filter_ = builderForValue.build();
6294           onChanged();
6295         } else {
6296           filterBuilder_.setMessage(builderForValue.build());
6297         }
6298         bitField0_ |= 0x00000001;
6299         return this;
6300       }
6301       /**
6302        * <code>required .Filter filter = 1;</code>
6303        */
mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)6304       public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
6305         if (filterBuilder_ == null) {
6306           if (((bitField0_ & 0x00000001) == 0x00000001) &&
6307               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
6308             filter_ =
6309               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
6310           } else {
6311             filter_ = value;
6312           }
6313           onChanged();
6314         } else {
6315           filterBuilder_.mergeFrom(value);
6316         }
6317         bitField0_ |= 0x00000001;
6318         return this;
6319       }
6320       /**
6321        * <code>required .Filter filter = 1;</code>
6322        */
clearFilter()6323       public Builder clearFilter() {
6324         if (filterBuilder_ == null) {
6325           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
6326           onChanged();
6327         } else {
6328           filterBuilder_.clear();
6329         }
6330         bitField0_ = (bitField0_ & ~0x00000001);
6331         return this;
6332       }
6333       /**
6334        * <code>required .Filter filter = 1;</code>
6335        */
getFilterBuilder()6336       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
6337         bitField0_ |= 0x00000001;
6338         onChanged();
6339         return getFilterFieldBuilder().getBuilder();
6340       }
6341       /**
6342        * <code>required .Filter filter = 1;</code>
6343        */
getFilterOrBuilder()6344       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
6345         if (filterBuilder_ != null) {
6346           return filterBuilder_.getMessageOrBuilder();
6347         } else {
6348           return filter_;
6349         }
6350       }
6351       /**
6352        * <code>required .Filter filter = 1;</code>
6353        */
6354       private com.google.protobuf.SingleFieldBuilder<
6355           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
getFilterFieldBuilder()6356           getFilterFieldBuilder() {
6357         if (filterBuilder_ == null) {
6358           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6359               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
6360                   filter_,
6361                   getParentForChildren(),
6362                   isClean());
6363           filter_ = null;
6364         }
6365         return filterBuilder_;
6366       }
6367 
6368       // @@protoc_insertion_point(builder_scope:FilterWrapper)
6369     }
6370 
6371     static {
6372       defaultInstance = new FilterWrapper(true);
defaultInstance.initFields()6373       defaultInstance.initFields();
6374     }
6375 
6376     // @@protoc_insertion_point(class_scope:FilterWrapper)
6377   }
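
  // Illustrative usage sketch -- not produced by protoc, added only as an example.
  // It shows one plausible round trip through FilterWrapper: both required fields
  // are populated (Filter's name via the standard generated setName setter,
  // FilterWrapper's filter via setFilter), the message is serialized, then
  // re-parsed. The filter class name is a hypothetical example value.
  private static FilterWrapper exampleFilterWrapperRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    Filter inner = Filter.newBuilder()
        .setName("org.apache.hadoop.hbase.filter.KeyOnlyFilter")  // required name field
        .build();
    FilterWrapper wrapper = FilterWrapper.newBuilder()
        .setFilter(inner)                                          // required filter field
        .build();
    byte[] wire = wrapper.toByteArray();
    return FilterWrapper.parseFrom(wire);
  }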
6378 
6379   public interface FirstKeyOnlyFilterOrBuilder
6380       extends com.google.protobuf.MessageOrBuilder {
6381   }
6382   /**
6383    * Protobuf type {@code FirstKeyOnlyFilter}
6384    */
6385   public static final class FirstKeyOnlyFilter extends
6386       com.google.protobuf.GeneratedMessage
6387       implements FirstKeyOnlyFilterOrBuilder {
6388     // Use FirstKeyOnlyFilter.newBuilder() to construct.
FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)6389     private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6390       super(builder);
6391       this.unknownFields = builder.getUnknownFields();
6392     }
FirstKeyOnlyFilter(boolean noInit)6393     private FirstKeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6394 
6395     private static final FirstKeyOnlyFilter defaultInstance;
getDefaultInstance()6396     public static FirstKeyOnlyFilter getDefaultInstance() {
6397       return defaultInstance;
6398     }
6399 
getDefaultInstanceForType()6400     public FirstKeyOnlyFilter getDefaultInstanceForType() {
6401       return defaultInstance;
6402     }
6403 
6404     private final com.google.protobuf.UnknownFieldSet unknownFields;
6405     @java.lang.Override
6406     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()6407         getUnknownFields() {
6408       return this.unknownFields;
6409     }
FirstKeyOnlyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6410     private FirstKeyOnlyFilter(
6411         com.google.protobuf.CodedInputStream input,
6412         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6413         throws com.google.protobuf.InvalidProtocolBufferException {
6414       initFields();
6415       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6416           com.google.protobuf.UnknownFieldSet.newBuilder();
6417       try {
6418         boolean done = false;
6419         while (!done) {
6420           int tag = input.readTag();
6421           switch (tag) {
6422             case 0:
6423               done = true;
6424               break;
6425             default: {
6426               if (!parseUnknownField(input, unknownFields,
6427                                      extensionRegistry, tag)) {
6428                 done = true;
6429               }
6430               break;
6431             }
6432           }
6433         }
6434       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6435         throw e.setUnfinishedMessage(this);
6436       } catch (java.io.IOException e) {
6437         throw new com.google.protobuf.InvalidProtocolBufferException(
6438             e.getMessage()).setUnfinishedMessage(this);
6439       } finally {
6440         this.unknownFields = unknownFields.build();
6441         makeExtensionsImmutable();
6442       }
6443     }
6444     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()6445         getDescriptor() {
6446       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor;
6447     }
6448 
6449     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6450         internalGetFieldAccessorTable() {
6451       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable
6452           .ensureFieldAccessorsInitialized(
6453               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class);
6454     }
6455 
6456     public static com.google.protobuf.Parser<FirstKeyOnlyFilter> PARSER =
6457         new com.google.protobuf.AbstractParser<FirstKeyOnlyFilter>() {
6458       public FirstKeyOnlyFilter parsePartialFrom(
6459           com.google.protobuf.CodedInputStream input,
6460           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6461           throws com.google.protobuf.InvalidProtocolBufferException {
6462         return new FirstKeyOnlyFilter(input, extensionRegistry);
6463       }
6464     };
6465 
6466     @java.lang.Override
getParserForType()6467     public com.google.protobuf.Parser<FirstKeyOnlyFilter> getParserForType() {
6468       return PARSER;
6469     }
6470 
initFields()6471     private void initFields() {
6472     }
6473     private byte memoizedIsInitialized = -1;
isInitialized()6474     public final boolean isInitialized() {
6475       byte isInitialized = memoizedIsInitialized;
6476       if (isInitialized != -1) return isInitialized == 1;
6477 
6478       memoizedIsInitialized = 1;
6479       return true;
6480     }
6481 
writeTo(com.google.protobuf.CodedOutputStream output)6482     public void writeTo(com.google.protobuf.CodedOutputStream output)
6483                         throws java.io.IOException {
6484       getSerializedSize();
6485       getUnknownFields().writeTo(output);
6486     }
6487 
6488     private int memoizedSerializedSize = -1;
getSerializedSize()6489     public int getSerializedSize() {
6490       int size = memoizedSerializedSize;
6491       if (size != -1) return size;
6492 
6493       size = 0;
6494       size += getUnknownFields().getSerializedSize();
6495       memoizedSerializedSize = size;
6496       return size;
6497     }
6498 
6499     private static final long serialVersionUID = 0L;
6500     @java.lang.Override
writeReplace()6501     protected java.lang.Object writeReplace()
6502         throws java.io.ObjectStreamException {
6503       return super.writeReplace();
6504     }
6505 
6506     @java.lang.Override
equals(final java.lang.Object obj)6507     public boolean equals(final java.lang.Object obj) {
6508       if (obj == this) {
6509        return true;
6510       }
6511       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)) {
6512         return super.equals(obj);
6513       }
6514       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj;
6515 
6516       boolean result = true;
6517       result = result &&
6518           getUnknownFields().equals(other.getUnknownFields());
6519       return result;
6520     }
6521 
6522     private int memoizedHashCode = 0;
6523     @java.lang.Override
hashCode()6524     public int hashCode() {
6525       if (memoizedHashCode != 0) {
6526         return memoizedHashCode;
6527       }
6528       int hash = 41;
6529       hash = (19 * hash) + getDescriptorForType().hashCode();
6530       hash = (29 * hash) + getUnknownFields().hashCode();
6531       memoizedHashCode = hash;
6532       return hash;
6533     }
6534 
parseFrom( com.google.protobuf.ByteString data)6535     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
6536         com.google.protobuf.ByteString data)
6537         throws com.google.protobuf.InvalidProtocolBufferException {
6538       return PARSER.parseFrom(data);
6539     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6540     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
6541         com.google.protobuf.ByteString data,
6542         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6543         throws com.google.protobuf.InvalidProtocolBufferException {
6544       return PARSER.parseFrom(data, extensionRegistry);
6545     }
parseFrom(byte[] data)6546     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data)
6547         throws com.google.protobuf.InvalidProtocolBufferException {
6548       return PARSER.parseFrom(data);
6549     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6550     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
6551         byte[] data,
6552         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6553         throws com.google.protobuf.InvalidProtocolBufferException {
6554       return PARSER.parseFrom(data, extensionRegistry);
6555     }
parseFrom(java.io.InputStream input)6556     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input)
6557         throws java.io.IOException {
6558       return PARSER.parseFrom(input);
6559     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6560     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
6561         java.io.InputStream input,
6562         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6563         throws java.io.IOException {
6564       return PARSER.parseFrom(input, extensionRegistry);
6565     }
parseDelimitedFrom(java.io.InputStream input)6566     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input)
6567         throws java.io.IOException {
6568       return PARSER.parseDelimitedFrom(input);
6569     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6570     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(
6571         java.io.InputStream input,
6572         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6573         throws java.io.IOException {
6574       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6575     }
parseFrom( com.google.protobuf.CodedInputStream input)6576     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
6577         com.google.protobuf.CodedInputStream input)
6578         throws java.io.IOException {
6579       return PARSER.parseFrom(input);
6580     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6581     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(
6582         com.google.protobuf.CodedInputStream input,
6583         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6584         throws java.io.IOException {
6585       return PARSER.parseFrom(input, extensionRegistry);
6586     }
6587 
newBuilder()6588     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()6589     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype)6590     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) {
6591       return newBuilder().mergeFrom(prototype);
6592     }
toBuilder()6593     public Builder toBuilder() { return newBuilder(this); }
6594 
6595     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6596     protected Builder newBuilderForType(
6597         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6598       Builder builder = new Builder(parent);
6599       return builder;
6600     }
6601     /**
6602      * Protobuf type {@code FirstKeyOnlyFilter}
6603      */
6604     public static final class Builder extends
6605         com.google.protobuf.GeneratedMessage.Builder<Builder>
6606        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder {
6607       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()6608           getDescriptor() {
6609         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor;
6610       }
6611 
6612       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6613           internalGetFieldAccessorTable() {
6614         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable
6615             .ensureFieldAccessorsInitialized(
6616                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class);
6617       }
6618 
6619       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder()
Builder()6620       private Builder() {
6621         maybeForceBuilderInitialization();
6622       }
6623 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6624       private Builder(
6625           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6626         super(parent);
6627         maybeForceBuilderInitialization();
6628       }
maybeForceBuilderInitialization()6629       private void maybeForceBuilderInitialization() {
6630         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6631         }
6632       }
create()6633       private static Builder create() {
6634         return new Builder();
6635       }
6636 
clear()6637       public Builder clear() {
6638         super.clear();
6639         return this;
6640       }
6641 
clone()6642       public Builder clone() {
6643         return create().mergeFrom(buildPartial());
6644       }
6645 
6646       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()6647           getDescriptorForType() {
6648         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor;
6649       }
6650 
getDefaultInstanceForType()6651       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() {
6652         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance();
6653       }
6654 
build()6655       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() {
6656         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial();
6657         if (!result.isInitialized()) {
6658           throw newUninitializedMessageException(result);
6659         }
6660         return result;
6661       }
6662 
buildPartial()6663       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() {
6664         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this);
6665         onBuilt();
6666         return result;
6667       }
6668 
mergeFrom(com.google.protobuf.Message other)6669       public Builder mergeFrom(com.google.protobuf.Message other) {
6670         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) {
6671           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other);
6672         } else {
6673           super.mergeFrom(other);
6674           return this;
6675         }
6676       }
6677 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other)6678       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) {
6679         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this;
6680         this.mergeUnknownFields(other.getUnknownFields());
6681         return this;
6682       }
6683 
isInitialized()6684       public final boolean isInitialized() {
6685         return true;
6686       }
6687 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6688       public Builder mergeFrom(
6689           com.google.protobuf.CodedInputStream input,
6690           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6691           throws java.io.IOException {
6692         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parsedMessage = null;
6693         try {
6694           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6695         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6696           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) e.getUnfinishedMessage();
6697           throw e;
6698         } finally {
6699           if (parsedMessage != null) {
6700             mergeFrom(parsedMessage);
6701           }
6702         }
6703         return this;
6704       }
6705 
6706       // @@protoc_insertion_point(builder_scope:FirstKeyOnlyFilter)
6707     }
6708 
6709     static {
6710       defaultInstance = new FirstKeyOnlyFilter(true);
defaultInstance.initFields()6711       defaultInstance.initFields();
6712     }
6713 
6714     // @@protoc_insertion_point(class_scope:FirstKeyOnlyFilter)
6715   }
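
  // Illustrative usage sketch -- not produced by protoc, added only as an example.
  // FirstKeyOnlyFilter declares no fields, so an instance built with defaults
  // serializes to an empty payload; this simply exercises the generated
  // build/serialize/parse cycle.
  private static FirstKeyOnlyFilter exampleFirstKeyOnlyFilterRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    FirstKeyOnlyFilter empty = FirstKeyOnlyFilter.newBuilder().build();
    byte[] wire = empty.toByteArray();           // zero-length for a fieldless message
    return FirstKeyOnlyFilter.parseFrom(wire);
  }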
6716 
6717   public interface FirstKeyValueMatchingQualifiersFilterOrBuilder
6718       extends com.google.protobuf.MessageOrBuilder {
6719 
6720     // repeated bytes qualifiers = 1;
6721     /**
6722      * <code>repeated bytes qualifiers = 1;</code>
6723      */
getQualifiersList()6724     java.util.List<com.google.protobuf.ByteString> getQualifiersList();
6725     /**
6726      * <code>repeated bytes qualifiers = 1;</code>
6727      */
getQualifiersCount()6728     int getQualifiersCount();
6729     /**
6730      * <code>repeated bytes qualifiers = 1;</code>
6731      */
getQualifiers(int index)6732     com.google.protobuf.ByteString getQualifiers(int index);
6733   }
6734   /**
6735    * Protobuf type {@code FirstKeyValueMatchingQualifiersFilter}
6736    */
6737   public static final class FirstKeyValueMatchingQualifiersFilter extends
6738       com.google.protobuf.GeneratedMessage
6739       implements FirstKeyValueMatchingQualifiersFilterOrBuilder {
6740     // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct.
FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)6741     private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6742       super(builder);
6743       this.unknownFields = builder.getUnknownFields();
6744     }
FirstKeyValueMatchingQualifiersFilter(boolean noInit)6745     private FirstKeyValueMatchingQualifiersFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6746 
6747     private static final FirstKeyValueMatchingQualifiersFilter defaultInstance;
getDefaultInstance()6748     public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() {
6749       return defaultInstance;
6750     }
6751 
getDefaultInstanceForType()6752     public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() {
6753       return defaultInstance;
6754     }
6755 
6756     private final com.google.protobuf.UnknownFieldSet unknownFields;
6757     @java.lang.Override
6758     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()6759         getUnknownFields() {
6760       return this.unknownFields;
6761     }
FirstKeyValueMatchingQualifiersFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6762     private FirstKeyValueMatchingQualifiersFilter(
6763         com.google.protobuf.CodedInputStream input,
6764         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6765         throws com.google.protobuf.InvalidProtocolBufferException {
6766       initFields();
6767       int mutable_bitField0_ = 0;
6768       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6769           com.google.protobuf.UnknownFieldSet.newBuilder();
6770       try {
6771         boolean done = false;
6772         while (!done) {
6773           int tag = input.readTag();
6774           switch (tag) {
6775             case 0:
6776               done = true;
6777               break;
6778             default: {
6779               if (!parseUnknownField(input, unknownFields,
6780                                      extensionRegistry, tag)) {
6781                 done = true;
6782               }
6783               break;
6784             }
6785             case 10: {
6786               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6787                 qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
6788                 mutable_bitField0_ |= 0x00000001;
6789               }
6790               qualifiers_.add(input.readBytes());
6791               break;
6792             }
6793           }
6794         }
6795       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6796         throw e.setUnfinishedMessage(this);
6797       } catch (java.io.IOException e) {
6798         throw new com.google.protobuf.InvalidProtocolBufferException(
6799             e.getMessage()).setUnfinishedMessage(this);
6800       } finally {
6801         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
6802           qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_);
6803         }
6804         this.unknownFields = unknownFields.build();
6805         makeExtensionsImmutable();
6806       }
6807     }
6808     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()6809         getDescriptor() {
6810       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor;
6811     }
6812 
6813     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()6814         internalGetFieldAccessorTable() {
6815       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable
6816           .ensureFieldAccessorsInitialized(
6817               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class);
6818     }
6819 
6820     public static com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> PARSER =
6821         new com.google.protobuf.AbstractParser<FirstKeyValueMatchingQualifiersFilter>() {
6822       public FirstKeyValueMatchingQualifiersFilter parsePartialFrom(
6823           com.google.protobuf.CodedInputStream input,
6824           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6825           throws com.google.protobuf.InvalidProtocolBufferException {
6826         return new FirstKeyValueMatchingQualifiersFilter(input, extensionRegistry);
6827       }
6828     };
6829 
6830     @java.lang.Override
getParserForType()6831     public com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> getParserForType() {
6832       return PARSER;
6833     }
6834 
6835     // repeated bytes qualifiers = 1;
6836     public static final int QUALIFIERS_FIELD_NUMBER = 1;
6837     private java.util.List<com.google.protobuf.ByteString> qualifiers_;
6838     /**
6839      * <code>repeated bytes qualifiers = 1;</code>
6840      */
6841     public java.util.List<com.google.protobuf.ByteString>
getQualifiersList()6842         getQualifiersList() {
6843       return qualifiers_;
6844     }
6845     /**
6846      * <code>repeated bytes qualifiers = 1;</code>
6847      */
getQualifiersCount()6848     public int getQualifiersCount() {
6849       return qualifiers_.size();
6850     }
6851     /**
6852      * <code>repeated bytes qualifiers = 1;</code>
6853      */
getQualifiers(int index)6854     public com.google.protobuf.ByteString getQualifiers(int index) {
6855       return qualifiers_.get(index);
6856     }
6857 
initFields()6858     private void initFields() {
6859       qualifiers_ = java.util.Collections.emptyList();
6860     }
6861     private byte memoizedIsInitialized = -1;
isInitialized()6862     public final boolean isInitialized() {
6863       byte isInitialized = memoizedIsInitialized;
6864       if (isInitialized != -1) return isInitialized == 1;
6865 
6866       memoizedIsInitialized = 1;
6867       return true;
6868     }
6869 
writeTo(com.google.protobuf.CodedOutputStream output)6870     public void writeTo(com.google.protobuf.CodedOutputStream output)
6871                         throws java.io.IOException {
6872       getSerializedSize();
6873       for (int i = 0; i < qualifiers_.size(); i++) {
6874         output.writeBytes(1, qualifiers_.get(i));
6875       }
6876       getUnknownFields().writeTo(output);
6877     }
6878 
6879     private int memoizedSerializedSize = -1;
getSerializedSize()6880     public int getSerializedSize() {
6881       int size = memoizedSerializedSize;
6882       if (size != -1) return size;
6883 
6884       size = 0;
6885       {
6886         int dataSize = 0;
6887         for (int i = 0; i < qualifiers_.size(); i++) {
6888           dataSize += com.google.protobuf.CodedOutputStream
6889             .computeBytesSizeNoTag(qualifiers_.get(i));
6890         }
6891         size += dataSize;
6892         size += 1 * getQualifiersList().size();
6893       }
6894       size += getUnknownFields().getSerializedSize();
6895       memoizedSerializedSize = size;
6896       return size;
6897     }
6898 
6899     private static final long serialVersionUID = 0L;
6900     @java.lang.Override
writeReplace()6901     protected java.lang.Object writeReplace()
6902         throws java.io.ObjectStreamException {
6903       return super.writeReplace();
6904     }
6905 
6906     @java.lang.Override
equals(final java.lang.Object obj)6907     public boolean equals(final java.lang.Object obj) {
6908       if (obj == this) {
6909        return true;
6910       }
6911       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)) {
6912         return super.equals(obj);
6913       }
6914       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj;
6915 
6916       boolean result = true;
6917       result = result && getQualifiersList()
6918           .equals(other.getQualifiersList());
6919       result = result &&
6920           getUnknownFields().equals(other.getUnknownFields());
6921       return result;
6922     }
6923 
6924     private int memoizedHashCode = 0;
6925     @java.lang.Override
hashCode()6926     public int hashCode() {
6927       if (memoizedHashCode != 0) {
6928         return memoizedHashCode;
6929       }
6930       int hash = 41;
6931       hash = (19 * hash) + getDescriptorForType().hashCode();
6932       if (getQualifiersCount() > 0) {
6933         hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER;
6934         hash = (53 * hash) + getQualifiersList().hashCode();
6935       }
6936       hash = (29 * hash) + getUnknownFields().hashCode();
6937       memoizedHashCode = hash;
6938       return hash;
6939     }
6940 
parseFrom( com.google.protobuf.ByteString data)6941     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
6942         com.google.protobuf.ByteString data)
6943         throws com.google.protobuf.InvalidProtocolBufferException {
6944       return PARSER.parseFrom(data);
6945     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6946     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
6947         com.google.protobuf.ByteString data,
6948         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6949         throws com.google.protobuf.InvalidProtocolBufferException {
6950       return PARSER.parseFrom(data, extensionRegistry);
6951     }
parseFrom(byte[] data)6952     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data)
6953         throws com.google.protobuf.InvalidProtocolBufferException {
6954       return PARSER.parseFrom(data);
6955     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6956     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
6957         byte[] data,
6958         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6959         throws com.google.protobuf.InvalidProtocolBufferException {
6960       return PARSER.parseFrom(data, extensionRegistry);
6961     }
parseFrom(java.io.InputStream input)6962     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input)
6963         throws java.io.IOException {
6964       return PARSER.parseFrom(input);
6965     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6966     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
6967         java.io.InputStream input,
6968         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6969         throws java.io.IOException {
6970       return PARSER.parseFrom(input, extensionRegistry);
6971     }
parseDelimitedFrom(java.io.InputStream input)6972     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input)
6973         throws java.io.IOException {
6974       return PARSER.parseDelimitedFrom(input);
6975     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6976     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(
6977         java.io.InputStream input,
6978         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6979         throws java.io.IOException {
6980       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6981     }
parseFrom( com.google.protobuf.CodedInputStream input)6982     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
6983         com.google.protobuf.CodedInputStream input)
6984         throws java.io.IOException {
6985       return PARSER.parseFrom(input);
6986     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6987     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(
6988         com.google.protobuf.CodedInputStream input,
6989         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6990         throws java.io.IOException {
6991       return PARSER.parseFrom(input, extensionRegistry);
6992     }
6993 
newBuilder()6994     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()6995     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype)6996     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) {
6997       return newBuilder().mergeFrom(prototype);
6998     }
toBuilder()6999     public Builder toBuilder() { return newBuilder(this); }
7000 
7001     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7002     protected Builder newBuilderForType(
7003         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7004       Builder builder = new Builder(parent);
7005       return builder;
7006     }
7007     /**
7008      * Protobuf type {@code FirstKeyValueMatchingQualifiersFilter}
7009      */
7010     public static final class Builder extends
7011         com.google.protobuf.GeneratedMessage.Builder<Builder>
7012        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder {
7013       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()7014           getDescriptor() {
7015         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor;
7016       }
7017 
7018       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()7019           internalGetFieldAccessorTable() {
7020         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable
7021             .ensureFieldAccessorsInitialized(
7022                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class);
7023       }
7024 
7025       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder()
Builder()7026       private Builder() {
7027         maybeForceBuilderInitialization();
7028       }
7029 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7030       private Builder(
7031           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7032         super(parent);
7033         maybeForceBuilderInitialization();
7034       }
maybeForceBuilderInitialization()7035       private void maybeForceBuilderInitialization() {
7036         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7037         }
7038       }
create()7039       private static Builder create() {
7040         return new Builder();
7041       }
7042 
clear()7043       public Builder clear() {
7044         super.clear();
7045         qualifiers_ = java.util.Collections.emptyList();
7046         bitField0_ = (bitField0_ & ~0x00000001);
7047         return this;
7048       }
7049 
clone()7050       public Builder clone() {
7051         return create().mergeFrom(buildPartial());
7052       }
7053 
7054       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()7055           getDescriptorForType() {
7056         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor;
7057       }
7058 
getDefaultInstanceForType()7059       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() {
7060         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance();
7061       }
7062 
build()7063       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() {
7064         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = buildPartial();
7065         if (!result.isInitialized()) {
7066           throw newUninitializedMessageException(result);
7067         }
7068         return result;
7069       }
7070 
buildPartial()7071       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() {
7072         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this);
7073         int from_bitField0_ = bitField0_;
7074         if (((bitField0_ & 0x00000001) == 0x00000001)) {
7075           qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_);
7076           bitField0_ = (bitField0_ & ~0x00000001);
7077         }
7078         result.qualifiers_ = qualifiers_;
7079         onBuilt();
7080         return result;
7081       }
7082 
mergeFrom(com.google.protobuf.Message other)7083       public Builder mergeFrom(com.google.protobuf.Message other) {
7084         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) {
7085           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other);
7086         } else {
7087           super.mergeFrom(other);
7088           return this;
7089         }
7090       }
7091 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other)7092       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) {
7093         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this;
7094         if (!other.qualifiers_.isEmpty()) {
7095           if (qualifiers_.isEmpty()) {
7096             qualifiers_ = other.qualifiers_;
7097             bitField0_ = (bitField0_ & ~0x00000001);
7098           } else {
7099             ensureQualifiersIsMutable();
7100             qualifiers_.addAll(other.qualifiers_);
7101           }
7102           onChanged();
7103         }
7104         this.mergeUnknownFields(other.getUnknownFields());
7105         return this;
7106       }
7107 
isInitialized()7108       public final boolean isInitialized() {
7109         return true;
7110       }
7111 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7112       public Builder mergeFrom(
7113           com.google.protobuf.CodedInputStream input,
7114           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7115           throws java.io.IOException {
7116         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parsedMessage = null;
7117         try {
7118           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7119         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7120           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) e.getUnfinishedMessage();
7121           throw e;
7122         } finally {
7123           if (parsedMessage != null) {
7124             mergeFrom(parsedMessage);
7125           }
7126         }
7127         return this;
7128       }
7129       private int bitField0_;
7130 
7131       // repeated bytes qualifiers = 1;
7132       private java.util.List<com.google.protobuf.ByteString> qualifiers_ = java.util.Collections.emptyList();
ensureQualifiersIsMutable()7133       private void ensureQualifiersIsMutable() {
7134         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
7135           qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifiers_);
7136           bitField0_ |= 0x00000001;
7137          }
7138       }
7139       /**
7140        * <code>repeated bytes qualifiers = 1;</code>
7141        */
7142       public java.util.List<com.google.protobuf.ByteString>
getQualifiersList()7143           getQualifiersList() {
7144         return java.util.Collections.unmodifiableList(qualifiers_);
7145       }
7146       /**
7147        * <code>repeated bytes qualifiers = 1;</code>
7148        */
getQualifiersCount()7149       public int getQualifiersCount() {
7150         return qualifiers_.size();
7151       }
7152       /**
7153        * <code>repeated bytes qualifiers = 1;</code>
7154        */
getQualifiers(int index)7155       public com.google.protobuf.ByteString getQualifiers(int index) {
7156         return qualifiers_.get(index);
7157       }
7158       /**
7159        * <code>repeated bytes qualifiers = 1;</code>
7160        */
setQualifiers( int index, com.google.protobuf.ByteString value)7161       public Builder setQualifiers(
7162           int index, com.google.protobuf.ByteString value) {
7163         if (value == null) {
7164           throw new NullPointerException();
7165         }
7166         ensureQualifiersIsMutable();
7167         qualifiers_.set(index, value);
7168         onChanged();
7169         return this;
7170       }
7171       /**
7172        * <code>repeated bytes qualifiers = 1;</code>
7173        */
addQualifiers(com.google.protobuf.ByteString value)7174       public Builder addQualifiers(com.google.protobuf.ByteString value) {
7175         if (value == null) {
7176           throw new NullPointerException();
7177         }
7178         ensureQualifiersIsMutable();
7179         qualifiers_.add(value);
7180         onChanged();
7181         return this;
7182       }
7183       /**
7184        * <code>repeated bytes qualifiers = 1;</code>
7185        */
addAllQualifiers( java.lang.Iterable<? extends com.google.protobuf.ByteString> values)7186       public Builder addAllQualifiers(
7187           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
7188         ensureQualifiersIsMutable();
7189         super.addAll(values, qualifiers_);
7190         onChanged();
7191         return this;
7192       }
7193       /**
7194        * <code>repeated bytes qualifiers = 1;</code>
7195        */
clearQualifiers()7196       public Builder clearQualifiers() {
7197         qualifiers_ = java.util.Collections.emptyList();
7198         bitField0_ = (bitField0_ & ~0x00000001);
7199         onChanged();
7200         return this;
7201       }
7202 
7203       // @@protoc_insertion_point(builder_scope:FirstKeyValueMatchingQualifiersFilter)
7204     }
7205 
7206     static {
7207       defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true);
defaultInstance.initFields()7208       defaultInstance.initFields();
7209     }
7210 
7211     // @@protoc_insertion_point(class_scope:FirstKeyValueMatchingQualifiersFilter)
7212   }
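
  // Illustrative usage sketch -- not produced by protoc, added only as an example.
  // It shows the generated addQualifiers adder for the repeated "qualifiers"
  // bytes field; the qualifier values here are hypothetical column qualifiers.
  private static FirstKeyValueMatchingQualifiersFilter exampleQualifiersFilter() {
    return FirstKeyValueMatchingQualifiersFilter.newBuilder()
        .addQualifiers(com.google.protobuf.ByteString.copyFromUtf8("q1"))
        .addQualifiers(com.google.protobuf.ByteString.copyFromUtf8("q2"))
        .build();
  }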
7213 
7214   public interface FuzzyRowFilterOrBuilder
7215       extends com.google.protobuf.MessageOrBuilder {
7216 
7217     // repeated .BytesBytesPair fuzzy_keys_data = 1;
7218     /**
7219      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7220      */
7221     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>
getFuzzyKeysDataList()7222         getFuzzyKeysDataList();
7223     /**
7224      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7225      */
getFuzzyKeysData(int index)7226     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index);
7227     /**
7228      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7229      */
getFuzzyKeysDataCount()7230     int getFuzzyKeysDataCount();
7231     /**
7232      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7233      */
7234     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getFuzzyKeysDataOrBuilderList()7235         getFuzzyKeysDataOrBuilderList();
7236     /**
7237      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7238      */
getFuzzyKeysDataOrBuilder( int index)7239     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder(
7240         int index);
7241   }
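
  // Illustrative usage sketch -- not produced by protoc, added only as an example.
  // Each fuzzy_keys_data entry is an HBaseProtos.BytesBytesPair; in HBase's
  // FuzzyRowFilter the pair is conventionally a row-key pattern plus a per-byte
  // mask, which is assumed here. The byte values are hypothetical, and
  // addFuzzyKeysData / setFirst / setSecond are assumed to be the standard
  // generated methods for these fields.
  private static FuzzyRowFilter exampleFuzzyRowFilter() {
    byte[] rowPattern = new byte[] { 'r', 'o', 'w', 0, 0 };   // hypothetical key pattern
    byte[] fuzzyMask  = new byte[] { 0, 0, 0, 1, 1 };         // hypothetical mask bytes
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair pair =
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder()
            .setFirst(com.google.protobuf.ByteString.copyFrom(rowPattern))
            .setSecond(com.google.protobuf.ByteString.copyFrom(fuzzyMask))
            .build();
    return FuzzyRowFilter.newBuilder()
        .addFuzzyKeysData(pair)
        .build();
  }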
7242   /**
7243    * Protobuf type {@code FuzzyRowFilter}
7244    */
7245   public static final class FuzzyRowFilter extends
7246       com.google.protobuf.GeneratedMessage
7247       implements FuzzyRowFilterOrBuilder {
7248     // Use FuzzyRowFilter.newBuilder() to construct.
FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)7249     private FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7250       super(builder);
7251       this.unknownFields = builder.getUnknownFields();
7252     }
FuzzyRowFilter(boolean noInit)7253     private FuzzyRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7254 
7255     private static final FuzzyRowFilter defaultInstance;
getDefaultInstance()7256     public static FuzzyRowFilter getDefaultInstance() {
7257       return defaultInstance;
7258     }
7259 
getDefaultInstanceForType()7260     public FuzzyRowFilter getDefaultInstanceForType() {
7261       return defaultInstance;
7262     }
7263 
7264     private final com.google.protobuf.UnknownFieldSet unknownFields;
7265     @java.lang.Override
7266     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()7267         getUnknownFields() {
7268       return this.unknownFields;
7269     }
FuzzyRowFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7270     private FuzzyRowFilter(
7271         com.google.protobuf.CodedInputStream input,
7272         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7273         throws com.google.protobuf.InvalidProtocolBufferException {
7274       initFields();
7275       int mutable_bitField0_ = 0;
7276       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7277           com.google.protobuf.UnknownFieldSet.newBuilder();
7278       try {
7279         boolean done = false;
7280         while (!done) {
7281           int tag = input.readTag();
7282           switch (tag) {
7283             case 0:
7284               done = true;
7285               break;
7286             default: {
7287               if (!parseUnknownField(input, unknownFields,
7288                                      extensionRegistry, tag)) {
7289                 done = true;
7290               }
7291               break;
7292             }
7293             case 10: {
7294               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
7295                 fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>();
7296                 mutable_bitField0_ |= 0x00000001;
7297               }
7298               fuzzyKeysData_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
7299               break;
7300             }
7301           }
7302         }
7303       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7304         throw e.setUnfinishedMessage(this);
7305       } catch (java.io.IOException e) {
7306         throw new com.google.protobuf.InvalidProtocolBufferException(
7307             e.getMessage()).setUnfinishedMessage(this);
7308       } finally {
7309         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
7310           fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_);
7311         }
7312         this.unknownFields = unknownFields.build();
7313         makeExtensionsImmutable();
7314       }
7315     }
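    // Editorial note: the tag values in the switch above follow the protobuf wire format,
    // tag = (field_number << 3) | wire_type.  For fuzzy_keys_data (field 1, length-delimited
    // wire type 2) that gives (1 << 3) | 2 = 10, which is why "case 10" accumulates the
    // repeated BytesBytesPair entries, while tag 0 signals end of input.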
7316     public static final com.google.protobuf.Descriptors.Descriptor
7317         getDescriptor() {
7318       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor;
7319     }
7320 
7321     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7322         internalGetFieldAccessorTable() {
7323       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable
7324           .ensureFieldAccessorsInitialized(
7325               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class);
7326     }
7327 
7328     public static com.google.protobuf.Parser<FuzzyRowFilter> PARSER =
7329         new com.google.protobuf.AbstractParser<FuzzyRowFilter>() {
7330       public FuzzyRowFilter parsePartialFrom(
7331           com.google.protobuf.CodedInputStream input,
7332           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7333           throws com.google.protobuf.InvalidProtocolBufferException {
7334         return new FuzzyRowFilter(input, extensionRegistry);
7335       }
7336     };
7337 
7338     @java.lang.Override
7339     public com.google.protobuf.Parser<FuzzyRowFilter> getParserForType() {
7340       return PARSER;
7341     }
7342 
7343     // repeated .BytesBytesPair fuzzy_keys_data = 1;
7344     public static final int FUZZY_KEYS_DATA_FIELD_NUMBER = 1;
7345     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_;
7346     /**
7347      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7348      */
7349     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() {
7350       return fuzzyKeysData_;
7351     }
7352     /**
7353      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7354      */
7355     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
7356         getFuzzyKeysDataOrBuilderList() {
7357       return fuzzyKeysData_;
7358     }
7359     /**
7360      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7361      */
7362     public int getFuzzyKeysDataCount() {
7363       return fuzzyKeysData_.size();
7364     }
7365     /**
7366      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7367      */
7368     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) {
7369       return fuzzyKeysData_.get(index);
7370     }
7371     /**
7372      * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7373      */
7374     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder(
7375         int index) {
7376       return fuzzyKeysData_.get(index);
7377     }
7378 
7379     private void initFields() {
7380       fuzzyKeysData_ = java.util.Collections.emptyList();
7381     }
7382     private byte memoizedIsInitialized = -1;
7383     public final boolean isInitialized() {
7384       byte isInitialized = memoizedIsInitialized;
7385       if (isInitialized != -1) return isInitialized == 1;
7386 
7387       for (int i = 0; i < getFuzzyKeysDataCount(); i++) {
7388         if (!getFuzzyKeysData(i).isInitialized()) {
7389           memoizedIsInitialized = 0;
7390           return false;
7391         }
7392       }
7393       memoizedIsInitialized = 1;
7394       return true;
7395     }
7396 
7397     public void writeTo(com.google.protobuf.CodedOutputStream output)
7398                         throws java.io.IOException {
7399       getSerializedSize();
7400       for (int i = 0; i < fuzzyKeysData_.size(); i++) {
7401         output.writeMessage(1, fuzzyKeysData_.get(i));
7402       }
7403       getUnknownFields().writeTo(output);
7404     }
7405 
7406     private int memoizedSerializedSize = -1;
7407     public int getSerializedSize() {
7408       int size = memoizedSerializedSize;
7409       if (size != -1) return size;
7410 
7411       size = 0;
7412       for (int i = 0; i < fuzzyKeysData_.size(); i++) {
7413         size += com.google.protobuf.CodedOutputStream
7414           .computeMessageSize(1, fuzzyKeysData_.get(i));
7415       }
7416       size += getUnknownFields().getSerializedSize();
7417       memoizedSerializedSize = size;
7418       return size;
7419     }
7420 
7421     private static final long serialVersionUID = 0L;
7422     @java.lang.Override
7423     protected java.lang.Object writeReplace()
7424         throws java.io.ObjectStreamException {
7425       return super.writeReplace();
7426     }
7427 
7428     @java.lang.Override
7429     public boolean equals(final java.lang.Object obj) {
7430       if (obj == this) {
7431        return true;
7432       }
7433       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)) {
7434         return super.equals(obj);
7435       }
7436       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj;
7437 
7438       boolean result = true;
7439       result = result && getFuzzyKeysDataList()
7440           .equals(other.getFuzzyKeysDataList());
7441       result = result &&
7442           getUnknownFields().equals(other.getUnknownFields());
7443       return result;
7444     }
7445 
7446     private int memoizedHashCode = 0;
7447     @java.lang.Override
7448     public int hashCode() {
7449       if (memoizedHashCode != 0) {
7450         return memoizedHashCode;
7451       }
7452       int hash = 41;
7453       hash = (19 * hash) + getDescriptorForType().hashCode();
7454       if (getFuzzyKeysDataCount() > 0) {
7455         hash = (37 * hash) + FUZZY_KEYS_DATA_FIELD_NUMBER;
7456         hash = (53 * hash) + getFuzzyKeysDataList().hashCode();
7457       }
7458       hash = (29 * hash) + getUnknownFields().hashCode();
7459       memoizedHashCode = hash;
7460       return hash;
7461     }
7462 
7463     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
7464         com.google.protobuf.ByteString data)
7465         throws com.google.protobuf.InvalidProtocolBufferException {
7466       return PARSER.parseFrom(data);
7467     }
7468     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
7469         com.google.protobuf.ByteString data,
7470         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7471         throws com.google.protobuf.InvalidProtocolBufferException {
7472       return PARSER.parseFrom(data, extensionRegistry);
7473     }
7474     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data)
7475         throws com.google.protobuf.InvalidProtocolBufferException {
7476       return PARSER.parseFrom(data);
7477     }
7478     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
7479         byte[] data,
7480         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7481         throws com.google.protobuf.InvalidProtocolBufferException {
7482       return PARSER.parseFrom(data, extensionRegistry);
7483     }
7484     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input)
7485         throws java.io.IOException {
7486       return PARSER.parseFrom(input);
7487     }
7488     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
7489         java.io.InputStream input,
7490         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7491         throws java.io.IOException {
7492       return PARSER.parseFrom(input, extensionRegistry);
7493     }
7494     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input)
7495         throws java.io.IOException {
7496       return PARSER.parseDelimitedFrom(input);
7497     }
7498     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(
7499         java.io.InputStream input,
7500         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7501         throws java.io.IOException {
7502       return PARSER.parseDelimitedFrom(input, extensionRegistry);
7503     }
7504     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
7505         com.google.protobuf.CodedInputStream input)
7506         throws java.io.IOException {
7507       return PARSER.parseFrom(input);
7508     }
7509     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(
7510         com.google.protobuf.CodedInputStream input,
7511         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7512         throws java.io.IOException {
7513       return PARSER.parseFrom(input, extensionRegistry);
7514     }
7515 
7516     public static Builder newBuilder() { return Builder.create(); }
7517     public Builder newBuilderForType() { return newBuilder(); }
7518     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) {
7519       return newBuilder().mergeFrom(prototype);
7520     }
7521     public Builder toBuilder() { return newBuilder(this); }
7522 
7523     @java.lang.Override
7524     protected Builder newBuilderForType(
7525         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7526       Builder builder = new Builder(parent);
7527       return builder;
7528     }
7529     /**
7530      * Protobuf type {@code FuzzyRowFilter}
7531      */
7532     public static final class Builder extends
7533         com.google.protobuf.GeneratedMessage.Builder<Builder>
7534        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder {
7535       public static final com.google.protobuf.Descriptors.Descriptor
7536           getDescriptor() {
7537         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor;
7538       }
7539 
7540       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7541           internalGetFieldAccessorTable() {
7542         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable
7543             .ensureFieldAccessorsInitialized(
7544                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class);
7545       }
7546 
7547       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder()
7548       private Builder() {
7549         maybeForceBuilderInitialization();
7550       }
7551 
7552       private Builder(
7553           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7554         super(parent);
7555         maybeForceBuilderInitialization();
7556       }
7557       private void maybeForceBuilderInitialization() {
7558         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7559           getFuzzyKeysDataFieldBuilder();
7560         }
7561       }
7562       private static Builder create() {
7563         return new Builder();
7564       }
7565 
7566       public Builder clear() {
7567         super.clear();
7568         if (fuzzyKeysDataBuilder_ == null) {
7569           fuzzyKeysData_ = java.util.Collections.emptyList();
7570           bitField0_ = (bitField0_ & ~0x00000001);
7571         } else {
7572           fuzzyKeysDataBuilder_.clear();
7573         }
7574         return this;
7575       }
7576 
7577       public Builder clone() {
7578         return create().mergeFrom(buildPartial());
7579       }
7580 
7581       public com.google.protobuf.Descriptors.Descriptor
7582           getDescriptorForType() {
7583         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor;
7584       }
7585 
7586       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() {
7587         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance();
7588       }
7589 
7590       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() {
7591         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial();
7592         if (!result.isInitialized()) {
7593           throw newUninitializedMessageException(result);
7594         }
7595         return result;
7596       }
7597 
7598       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() {
7599         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this);
7600         int from_bitField0_ = bitField0_;
7601         if (fuzzyKeysDataBuilder_ == null) {
7602           if (((bitField0_ & 0x00000001) == 0x00000001)) {
7603             fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_);
7604             bitField0_ = (bitField0_ & ~0x00000001);
7605           }
7606           result.fuzzyKeysData_ = fuzzyKeysData_;
7607         } else {
7608           result.fuzzyKeysData_ = fuzzyKeysDataBuilder_.build();
7609         }
7610         onBuilt();
7611         return result;
7612       }
7613 
7614       public Builder mergeFrom(com.google.protobuf.Message other) {
7615         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) {
7616           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other);
7617         } else {
7618           super.mergeFrom(other);
7619           return this;
7620         }
7621       }
7622 
7623       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) {
7624         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this;
7625         if (fuzzyKeysDataBuilder_ == null) {
7626           if (!other.fuzzyKeysData_.isEmpty()) {
7627             if (fuzzyKeysData_.isEmpty()) {
7628               fuzzyKeysData_ = other.fuzzyKeysData_;
7629               bitField0_ = (bitField0_ & ~0x00000001);
7630             } else {
7631               ensureFuzzyKeysDataIsMutable();
7632               fuzzyKeysData_.addAll(other.fuzzyKeysData_);
7633             }
7634             onChanged();
7635           }
7636         } else {
7637           if (!other.fuzzyKeysData_.isEmpty()) {
7638             if (fuzzyKeysDataBuilder_.isEmpty()) {
7639               fuzzyKeysDataBuilder_.dispose();
7640               fuzzyKeysDataBuilder_ = null;
7641               fuzzyKeysData_ = other.fuzzyKeysData_;
7642               bitField0_ = (bitField0_ & ~0x00000001);
7643               fuzzyKeysDataBuilder_ =
7644                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
7645                    getFuzzyKeysDataFieldBuilder() : null;
7646             } else {
7647               fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_);
7648             }
7649           }
7650         }
7651         this.mergeUnknownFields(other.getUnknownFields());
7652         return this;
7653       }
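      // Editorial note: as the branches above show, merging another FuzzyRowFilter appends its
      // fuzzy_keys_data entries to this builder's existing list rather than replacing it.
      // A hedged sketch (protoA and protoB are hypothetical messages):
      //
      //   FilterProtos.FuzzyRowFilter merged =
      //       FilterProtos.FuzzyRowFilter.newBuilder(protoA)
      //           .mergeFrom(protoB)   // fuzzy_keys_data now holds A's pairs, then B's
      //           .build();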
7654 
7655       public final boolean isInitialized() {
7656         for (int i = 0; i < getFuzzyKeysDataCount(); i++) {
7657           if (!getFuzzyKeysData(i).isInitialized()) {
7658 
7659             return false;
7660           }
7661         }
7662         return true;
7663       }
7664 
7665       public Builder mergeFrom(
7666           com.google.protobuf.CodedInputStream input,
7667           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7668           throws java.io.IOException {
7669         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parsedMessage = null;
7670         try {
7671           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7672         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7673           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) e.getUnfinishedMessage();
7674           throw e;
7675         } finally {
7676           if (parsedMessage != null) {
7677             mergeFrom(parsedMessage);
7678           }
7679         }
7680         return this;
7681       }
7682       private int bitField0_;
7683 
7684       // repeated .BytesBytesPair fuzzy_keys_data = 1;
7685       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_ =
7686         java.util.Collections.emptyList();
7687       private void ensureFuzzyKeysDataIsMutable() {
7688         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
7689           fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(fuzzyKeysData_);
7690           bitField0_ |= 0x00000001;
7691          }
7692       }
7693 
7694       private com.google.protobuf.RepeatedFieldBuilder<
7695           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_;
7696 
7697       /**
7698        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7699        */
7700       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() {
7701         if (fuzzyKeysDataBuilder_ == null) {
7702           return java.util.Collections.unmodifiableList(fuzzyKeysData_);
7703         } else {
7704           return fuzzyKeysDataBuilder_.getMessageList();
7705         }
7706       }
7707       /**
7708        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7709        */
7710       public int getFuzzyKeysDataCount() {
7711         if (fuzzyKeysDataBuilder_ == null) {
7712           return fuzzyKeysData_.size();
7713         } else {
7714           return fuzzyKeysDataBuilder_.getCount();
7715         }
7716       }
7717       /**
7718        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7719        */
7720       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) {
7721         if (fuzzyKeysDataBuilder_ == null) {
7722           return fuzzyKeysData_.get(index);
7723         } else {
7724           return fuzzyKeysDataBuilder_.getMessage(index);
7725         }
7726       }
7727       /**
7728        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7729        */
7730       public Builder setFuzzyKeysData(
7731           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
7732         if (fuzzyKeysDataBuilder_ == null) {
7733           if (value == null) {
7734             throw new NullPointerException();
7735           }
7736           ensureFuzzyKeysDataIsMutable();
7737           fuzzyKeysData_.set(index, value);
7738           onChanged();
7739         } else {
7740           fuzzyKeysDataBuilder_.setMessage(index, value);
7741         }
7742         return this;
7743       }
7744       /**
7745        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7746        */
7747       public Builder setFuzzyKeysData(
7748           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
7749         if (fuzzyKeysDataBuilder_ == null) {
7750           ensureFuzzyKeysDataIsMutable();
7751           fuzzyKeysData_.set(index, builderForValue.build());
7752           onChanged();
7753         } else {
7754           fuzzyKeysDataBuilder_.setMessage(index, builderForValue.build());
7755         }
7756         return this;
7757       }
7758       /**
7759        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7760        */
7761       public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
7762         if (fuzzyKeysDataBuilder_ == null) {
7763           if (value == null) {
7764             throw new NullPointerException();
7765           }
7766           ensureFuzzyKeysDataIsMutable();
7767           fuzzyKeysData_.add(value);
7768           onChanged();
7769         } else {
7770           fuzzyKeysDataBuilder_.addMessage(value);
7771         }
7772         return this;
7773       }
7774       /**
7775        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7776        */
7777       public Builder addFuzzyKeysData(
7778           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
7779         if (fuzzyKeysDataBuilder_ == null) {
7780           if (value == null) {
7781             throw new NullPointerException();
7782           }
7783           ensureFuzzyKeysDataIsMutable();
7784           fuzzyKeysData_.add(index, value);
7785           onChanged();
7786         } else {
7787           fuzzyKeysDataBuilder_.addMessage(index, value);
7788         }
7789         return this;
7790       }
7791       /**
7792        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7793        */
7794       public Builder addFuzzyKeysData(
7795           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
7796         if (fuzzyKeysDataBuilder_ == null) {
7797           ensureFuzzyKeysDataIsMutable();
7798           fuzzyKeysData_.add(builderForValue.build());
7799           onChanged();
7800         } else {
7801           fuzzyKeysDataBuilder_.addMessage(builderForValue.build());
7802         }
7803         return this;
7804       }
7805       /**
7806        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7807        */
7808       public Builder addFuzzyKeysData(
7809           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
7810         if (fuzzyKeysDataBuilder_ == null) {
7811           ensureFuzzyKeysDataIsMutable();
7812           fuzzyKeysData_.add(index, builderForValue.build());
7813           onChanged();
7814         } else {
7815           fuzzyKeysDataBuilder_.addMessage(index, builderForValue.build());
7816         }
7817         return this;
7818       }
7819       /**
7820        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7821        */
7822       public Builder addAllFuzzyKeysData(
7823           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
7824         if (fuzzyKeysDataBuilder_ == null) {
7825           ensureFuzzyKeysDataIsMutable();
7826           super.addAll(values, fuzzyKeysData_);
7827           onChanged();
7828         } else {
7829           fuzzyKeysDataBuilder_.addAllMessages(values);
7830         }
7831         return this;
7832       }
7833       /**
7834        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7835        */
7836       public Builder clearFuzzyKeysData() {
7837         if (fuzzyKeysDataBuilder_ == null) {
7838           fuzzyKeysData_ = java.util.Collections.emptyList();
7839           bitField0_ = (bitField0_ & ~0x00000001);
7840           onChanged();
7841         } else {
7842           fuzzyKeysDataBuilder_.clear();
7843         }
7844         return this;
7845       }
7846       /**
7847        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7848        */
7849       public Builder removeFuzzyKeysData(int index) {
7850         if (fuzzyKeysDataBuilder_ == null) {
7851           ensureFuzzyKeysDataIsMutable();
7852           fuzzyKeysData_.remove(index);
7853           onChanged();
7854         } else {
7855           fuzzyKeysDataBuilder_.remove(index);
7856         }
7857         return this;
7858       }
7859       /**
7860        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7861        */
7862       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder(
7863           int index) {
7864         return getFuzzyKeysDataFieldBuilder().getBuilder(index);
7865       }
7866       /**
7867        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7868        */
7869       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder(
7870           int index) {
7871         if (fuzzyKeysDataBuilder_ == null) {
7872           return fuzzyKeysData_.get(index);  } else {
7873           return fuzzyKeysDataBuilder_.getMessageOrBuilder(index);
7874         }
7875       }
7876       /**
7877        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7878        */
7879       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
7880            getFuzzyKeysDataOrBuilderList() {
7881         if (fuzzyKeysDataBuilder_ != null) {
7882           return fuzzyKeysDataBuilder_.getMessageOrBuilderList();
7883         } else {
7884           return java.util.Collections.unmodifiableList(fuzzyKeysData_);
7885         }
7886       }
7887       /**
7888        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7889        */
7890       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() {
7891         return getFuzzyKeysDataFieldBuilder().addBuilder(
7892             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
7893       }
7894       /**
7895        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7896        */
7897       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder(
7898           int index) {
7899         return getFuzzyKeysDataFieldBuilder().addBuilder(
7900             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
7901       }
7902       /**
7903        * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code>
7904        */
7905       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder>
7906            getFuzzyKeysDataBuilderList() {
7907         return getFuzzyKeysDataFieldBuilder().getBuilderList();
7908       }
7909       private com.google.protobuf.RepeatedFieldBuilder<
7910           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
7911           getFuzzyKeysDataFieldBuilder() {
7912         if (fuzzyKeysDataBuilder_ == null) {
7913           fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
7914               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>(
7915                   fuzzyKeysData_,
7916                   ((bitField0_ & 0x00000001) == 0x00000001),
7917                   getParentForChildren(),
7918                   isClean());
7919           fuzzyKeysData_ = null;
7920         }
7921         return fuzzyKeysDataBuilder_;
7922       }
7923 
7924       // @@protoc_insertion_point(builder_scope:FuzzyRowFilter)
7925     }
7926 
7927     static {
7928       defaultInstance = new FuzzyRowFilter(true);
7929       defaultInstance.initFields();
7930     }
7931 
7932     // @@protoc_insertion_point(class_scope:FuzzyRowFilter)
7933   }
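  // Editorial usage note (not generated code): a minimal build/serialize/parse round trip for
  // FuzzyRowFilter.  The BytesBytesPair setters (setFirst/setSecond) are assumed from
  // HBaseProtos' generated code for its two bytes fields; in HBase's FuzzyRowFilter the first
  // value is the row key template and the second the per-byte mask.
  //
  //   HBaseProtos.BytesBytesPair pair = HBaseProtos.BytesBytesPair.newBuilder()
  //       .setFirst(com.google.protobuf.ByteString.copyFromUtf8("row_key_????"))
  //       .setSecond(com.google.protobuf.ByteString.copyFrom(
  //           new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1}))
  //       .build();
  //   FilterProtos.FuzzyRowFilter proto =
  //       FilterProtos.FuzzyRowFilter.newBuilder().addFuzzyKeysData(pair).build();
  //   FilterProtos.FuzzyRowFilter parsed =
  //       FilterProtos.FuzzyRowFilter.parseFrom(proto.toByteString());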
7934 
7935   public interface InclusiveStopFilterOrBuilder
7936       extends com.google.protobuf.MessageOrBuilder {
7937 
7938     // optional bytes stop_row_key = 1;
7939     /**
7940      * <code>optional bytes stop_row_key = 1;</code>
7941      */
7942     boolean hasStopRowKey();
7943     /**
7944      * <code>optional bytes stop_row_key = 1;</code>
7945      */
7946     com.google.protobuf.ByteString getStopRowKey();
7947   }
7948   /**
7949    * Protobuf type {@code InclusiveStopFilter}
7950    */
7951   public static final class InclusiveStopFilter extends
7952       com.google.protobuf.GeneratedMessage
7953       implements InclusiveStopFilterOrBuilder {
7954     // Use InclusiveStopFilter.newBuilder() to construct.
7955     private InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7956       super(builder);
7957       this.unknownFields = builder.getUnknownFields();
7958     }
7959     private InclusiveStopFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7960 
7961     private static final InclusiveStopFilter defaultInstance;
7962     public static InclusiveStopFilter getDefaultInstance() {
7963       return defaultInstance;
7964     }
7965 
7966     public InclusiveStopFilter getDefaultInstanceForType() {
7967       return defaultInstance;
7968     }
7969 
7970     private final com.google.protobuf.UnknownFieldSet unknownFields;
7971     @java.lang.Override
7972     public final com.google.protobuf.UnknownFieldSet
7973         getUnknownFields() {
7974       return this.unknownFields;
7975     }
7976     private InclusiveStopFilter(
7977         com.google.protobuf.CodedInputStream input,
7978         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7979         throws com.google.protobuf.InvalidProtocolBufferException {
7980       initFields();
7981       int mutable_bitField0_ = 0;
7982       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7983           com.google.protobuf.UnknownFieldSet.newBuilder();
7984       try {
7985         boolean done = false;
7986         while (!done) {
7987           int tag = input.readTag();
7988           switch (tag) {
7989             case 0:
7990               done = true;
7991               break;
7992             default: {
7993               if (!parseUnknownField(input, unknownFields,
7994                                      extensionRegistry, tag)) {
7995                 done = true;
7996               }
7997               break;
7998             }
7999             case 10: {
8000               bitField0_ |= 0x00000001;
8001               stopRowKey_ = input.readBytes();
8002               break;
8003             }
8004           }
8005         }
8006       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8007         throw e.setUnfinishedMessage(this);
8008       } catch (java.io.IOException e) {
8009         throw new com.google.protobuf.InvalidProtocolBufferException(
8010             e.getMessage()).setUnfinishedMessage(this);
8011       } finally {
8012         this.unknownFields = unknownFields.build();
8013         makeExtensionsImmutable();
8014       }
8015     }
8016     public static final com.google.protobuf.Descriptors.Descriptor
8017         getDescriptor() {
8018       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor;
8019     }
8020 
8021     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8022         internalGetFieldAccessorTable() {
8023       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable
8024           .ensureFieldAccessorsInitialized(
8025               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
8026     }
8027 
8028     public static com.google.protobuf.Parser<InclusiveStopFilter> PARSER =
8029         new com.google.protobuf.AbstractParser<InclusiveStopFilter>() {
8030       public InclusiveStopFilter parsePartialFrom(
8031           com.google.protobuf.CodedInputStream input,
8032           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8033           throws com.google.protobuf.InvalidProtocolBufferException {
8034         return new InclusiveStopFilter(input, extensionRegistry);
8035       }
8036     };
8037 
8038     @java.lang.Override
8039     public com.google.protobuf.Parser<InclusiveStopFilter> getParserForType() {
8040       return PARSER;
8041     }
8042 
8043     private int bitField0_;
8044     // optional bytes stop_row_key = 1;
8045     public static final int STOP_ROW_KEY_FIELD_NUMBER = 1;
8046     private com.google.protobuf.ByteString stopRowKey_;
8047     /**
8048      * <code>optional bytes stop_row_key = 1;</code>
8049      */
8050     public boolean hasStopRowKey() {
8051       return ((bitField0_ & 0x00000001) == 0x00000001);
8052     }
8053     /**
8054      * <code>optional bytes stop_row_key = 1;</code>
8055      */
8056     public com.google.protobuf.ByteString getStopRowKey() {
8057       return stopRowKey_;
8058     }
8059 
8060     private void initFields() {
8061       stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
8062     }
8063     private byte memoizedIsInitialized = -1;
8064     public final boolean isInitialized() {
8065       byte isInitialized = memoizedIsInitialized;
8066       if (isInitialized != -1) return isInitialized == 1;
8067 
8068       memoizedIsInitialized = 1;
8069       return true;
8070     }
8071 
8072     public void writeTo(com.google.protobuf.CodedOutputStream output)
8073                         throws java.io.IOException {
8074       getSerializedSize();
8075       if (((bitField0_ & 0x00000001) == 0x00000001)) {
8076         output.writeBytes(1, stopRowKey_);
8077       }
8078       getUnknownFields().writeTo(output);
8079     }
8080 
8081     private int memoizedSerializedSize = -1;
8082     public int getSerializedSize() {
8083       int size = memoizedSerializedSize;
8084       if (size != -1) return size;
8085 
8086       size = 0;
8087       if (((bitField0_ & 0x00000001) == 0x00000001)) {
8088         size += com.google.protobuf.CodedOutputStream
8089           .computeBytesSize(1, stopRowKey_);
8090       }
8091       size += getUnknownFields().getSerializedSize();
8092       memoizedSerializedSize = size;
8093       return size;
8094     }
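    // Editorial note: computeBytesSize(1, stopRowKey_) above is the one-byte tag
    // ((1 << 3) | 2 = 10) plus a varint length prefix plus the payload.  For example, a
    // 3-byte stop_row_key contributes 1 + 1 + 3 = 5 bytes to the serialized size.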
8095 
8096     private static final long serialVersionUID = 0L;
8097     @java.lang.Override
8098     protected java.lang.Object writeReplace()
8099         throws java.io.ObjectStreamException {
8100       return super.writeReplace();
8101     }
8102 
8103     @java.lang.Override
8104     public boolean equals(final java.lang.Object obj) {
8105       if (obj == this) {
8106        return true;
8107       }
8108       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)) {
8109         return super.equals(obj);
8110       }
8111       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj;
8112 
8113       boolean result = true;
8114       result = result && (hasStopRowKey() == other.hasStopRowKey());
8115       if (hasStopRowKey()) {
8116         result = result && getStopRowKey()
8117             .equals(other.getStopRowKey());
8118       }
8119       result = result &&
8120           getUnknownFields().equals(other.getUnknownFields());
8121       return result;
8122     }
8123 
8124     private int memoizedHashCode = 0;
8125     @java.lang.Override
8126     public int hashCode() {
8127       if (memoizedHashCode != 0) {
8128         return memoizedHashCode;
8129       }
8130       int hash = 41;
8131       hash = (19 * hash) + getDescriptorForType().hashCode();
8132       if (hasStopRowKey()) {
8133         hash = (37 * hash) + STOP_ROW_KEY_FIELD_NUMBER;
8134         hash = (53 * hash) + getStopRowKey().hashCode();
8135       }
8136       hash = (29 * hash) + getUnknownFields().hashCode();
8137       memoizedHashCode = hash;
8138       return hash;
8139     }
8140 
8141     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
8142         com.google.protobuf.ByteString data)
8143         throws com.google.protobuf.InvalidProtocolBufferException {
8144       return PARSER.parseFrom(data);
8145     }
8146     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
8147         com.google.protobuf.ByteString data,
8148         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8149         throws com.google.protobuf.InvalidProtocolBufferException {
8150       return PARSER.parseFrom(data, extensionRegistry);
8151     }
8152     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data)
8153         throws com.google.protobuf.InvalidProtocolBufferException {
8154       return PARSER.parseFrom(data);
8155     }
8156     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
8157         byte[] data,
8158         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8159         throws com.google.protobuf.InvalidProtocolBufferException {
8160       return PARSER.parseFrom(data, extensionRegistry);
8161     }
8162     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input)
8163         throws java.io.IOException {
8164       return PARSER.parseFrom(input);
8165     }
8166     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
8167         java.io.InputStream input,
8168         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8169         throws java.io.IOException {
8170       return PARSER.parseFrom(input, extensionRegistry);
8171     }
8172     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input)
8173         throws java.io.IOException {
8174       return PARSER.parseDelimitedFrom(input);
8175     }
8176     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(
8177         java.io.InputStream input,
8178         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8179         throws java.io.IOException {
8180       return PARSER.parseDelimitedFrom(input, extensionRegistry);
8181     }
8182     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
8183         com.google.protobuf.CodedInputStream input)
8184         throws java.io.IOException {
8185       return PARSER.parseFrom(input);
8186     }
8187     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(
8188         com.google.protobuf.CodedInputStream input,
8189         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8190         throws java.io.IOException {
8191       return PARSER.parseFrom(input, extensionRegistry);
8192     }
8193 
8194     public static Builder newBuilder() { return Builder.create(); }
8195     public Builder newBuilderForType() { return newBuilder(); }
8196     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) {
8197       return newBuilder().mergeFrom(prototype);
8198     }
8199     public Builder toBuilder() { return newBuilder(this); }
8200 
8201     @java.lang.Override
8202     protected Builder newBuilderForType(
8203         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8204       Builder builder = new Builder(parent);
8205       return builder;
8206     }
8207     /**
8208      * Protobuf type {@code InclusiveStopFilter}
8209      */
8210     public static final class Builder extends
8211         com.google.protobuf.GeneratedMessage.Builder<Builder>
8212        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder {
8213       public static final com.google.protobuf.Descriptors.Descriptor
8214           getDescriptor() {
8215         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor;
8216       }
8217 
8218       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8219           internalGetFieldAccessorTable() {
8220         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable
8221             .ensureFieldAccessorsInitialized(
8222                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class);
8223       }
8224 
8225       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder()
8226       private Builder() {
8227         maybeForceBuilderInitialization();
8228       }
8229 
8230       private Builder(
8231           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8232         super(parent);
8233         maybeForceBuilderInitialization();
8234       }
8235       private void maybeForceBuilderInitialization() {
8236         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8237         }
8238       }
8239       private static Builder create() {
8240         return new Builder();
8241       }
8242 
8243       public Builder clear() {
8244         super.clear();
8245         stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
8246         bitField0_ = (bitField0_ & ~0x00000001);
8247         return this;
8248       }
8249 
8250       public Builder clone() {
8251         return create().mergeFrom(buildPartial());
8252       }
8253 
8254       public com.google.protobuf.Descriptors.Descriptor
8255           getDescriptorForType() {
8256         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor;
8257       }
8258 
8259       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() {
8260         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance();
8261       }
8262 
8263       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() {
8264         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = buildPartial();
8265         if (!result.isInitialized()) {
8266           throw newUninitializedMessageException(result);
8267         }
8268         return result;
8269       }
8270 
8271       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() {
8272         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this);
8273         int from_bitField0_ = bitField0_;
8274         int to_bitField0_ = 0;
8275         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8276           to_bitField0_ |= 0x00000001;
8277         }
8278         result.stopRowKey_ = stopRowKey_;
8279         result.bitField0_ = to_bitField0_;
8280         onBuilt();
8281         return result;
8282       }
8283 
8284       public Builder mergeFrom(com.google.protobuf.Message other) {
8285         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) {
8286           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other);
8287         } else {
8288           super.mergeFrom(other);
8289           return this;
8290         }
8291       }
8292 
8293       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) {
8294         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this;
8295         if (other.hasStopRowKey()) {
8296           setStopRowKey(other.getStopRowKey());
8297         }
8298         this.mergeUnknownFields(other.getUnknownFields());
8299         return this;
8300       }
8301 
8302       public final boolean isInitialized() {
8303         return true;
8304       }
8305 
8306       public Builder mergeFrom(
8307           com.google.protobuf.CodedInputStream input,
8308           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8309           throws java.io.IOException {
8310         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parsedMessage = null;
8311         try {
8312           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8313         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8314           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) e.getUnfinishedMessage();
8315           throw e;
8316         } finally {
8317           if (parsedMessage != null) {
8318             mergeFrom(parsedMessage);
8319           }
8320         }
8321         return this;
8322       }
8323       private int bitField0_;
8324 
8325       // optional bytes stop_row_key = 1;
8326       private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY;
8327       /**
8328        * <code>optional bytes stop_row_key = 1;</code>
8329        */
8330       public boolean hasStopRowKey() {
8331         return ((bitField0_ & 0x00000001) == 0x00000001);
8332       }
8333       /**
8334        * <code>optional bytes stop_row_key = 1;</code>
8335        */
8336       public com.google.protobuf.ByteString getStopRowKey() {
8337         return stopRowKey_;
8338       }
8339       /**
8340        * <code>optional bytes stop_row_key = 1;</code>
8341        */
8342       public Builder setStopRowKey(com.google.protobuf.ByteString value) {
8343         if (value == null) {
8344           throw new NullPointerException();
8345         }
8346         bitField0_ |= 0x00000001;
8347         stopRowKey_ = value;
8348         onChanged();
8349         return this;
8350       }
8351       /**
8352        * <code>optional bytes stop_row_key = 1;</code>
8353        */
8354       public Builder clearStopRowKey() {
8355         bitField0_ = (bitField0_ & ~0x00000001);
8356         stopRowKey_ = getDefaultInstance().getStopRowKey();
8357         onChanged();
8358         return this;
8359       }
8360 
8361       // @@protoc_insertion_point(builder_scope:InclusiveStopFilter)
8362     }
8363 
8364     static {
8365       defaultInstance = new InclusiveStopFilter(true);
8366       defaultInstance.initFields();
8367     }
8368 
8369     // @@protoc_insertion_point(class_scope:InclusiveStopFilter)
8370   }
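  // Editorial usage note (not generated code): a minimal sketch for InclusiveStopFilter, whose
  // only field is the optional stop row key.
  //
  //   FilterProtos.InclusiveStopFilter proto =
  //       FilterProtos.InclusiveStopFilter.newBuilder()
  //           .setStopRowKey(com.google.protobuf.ByteString.copyFromUtf8("row-0099"))
  //           .build();
  //   byte[] wire = proto.toByteArray();
  //   FilterProtos.InclusiveStopFilter parsed =
  //       FilterProtos.InclusiveStopFilter.parseFrom(wire);
  //   assert parsed.hasStopRowKey();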
8371 
8372   public interface KeyOnlyFilterOrBuilder
8373       extends com.google.protobuf.MessageOrBuilder {
8374 
8375     // required bool len_as_val = 1;
8376     /**
8377      * <code>required bool len_as_val = 1;</code>
8378      */
8379     boolean hasLenAsVal();
8380     /**
8381      * <code>required bool len_as_val = 1;</code>
8382      */
8383     boolean getLenAsVal();
8384   }
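  // Editorial usage note (not generated code): len_as_val is a required field, so the generated
  // KeyOnlyFilter.Builder#build() throws an UninitializedMessageException until it is set.
  // setLenAsVal(boolean) is the standard protoc-generated setter and is assumed here, as it lies
  // outside this excerpt.
  //
  //   FilterProtos.KeyOnlyFilter proto =
  //       FilterProtos.KeyOnlyFilter.newBuilder().setLenAsVal(true).build();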
8385   /**
8386    * Protobuf type {@code KeyOnlyFilter}
8387    */
8388   public static final class KeyOnlyFilter extends
8389       com.google.protobuf.GeneratedMessage
8390       implements KeyOnlyFilterOrBuilder {
8391     // Use KeyOnlyFilter.newBuilder() to construct.
8392     private KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8393       super(builder);
8394       this.unknownFields = builder.getUnknownFields();
8395     }
8396     private KeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8397 
8398     private static final KeyOnlyFilter defaultInstance;
8399     public static KeyOnlyFilter getDefaultInstance() {
8400       return defaultInstance;
8401     }
8402 
8403     public KeyOnlyFilter getDefaultInstanceForType() {
8404       return defaultInstance;
8405     }
8406 
8407     private final com.google.protobuf.UnknownFieldSet unknownFields;
8408     @java.lang.Override
8409     public final com.google.protobuf.UnknownFieldSet
8410         getUnknownFields() {
8411       return this.unknownFields;
8412     }
8413     private KeyOnlyFilter(
8414         com.google.protobuf.CodedInputStream input,
8415         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8416         throws com.google.protobuf.InvalidProtocolBufferException {
8417       initFields();
8418       int mutable_bitField0_ = 0;
8419       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8420           com.google.protobuf.UnknownFieldSet.newBuilder();
8421       try {
8422         boolean done = false;
8423         while (!done) {
8424           int tag = input.readTag();
8425           switch (tag) {
8426             case 0:
8427               done = true;
8428               break;
8429             default: {
8430               if (!parseUnknownField(input, unknownFields,
8431                                      extensionRegistry, tag)) {
8432                 done = true;
8433               }
8434               break;
8435             }
8436             case 8: {
8437               bitField0_ |= 0x00000001;
8438               lenAsVal_ = input.readBool();
8439               break;
8440             }
8441           }
8442         }
8443       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8444         throw e.setUnfinishedMessage(this);
8445       } catch (java.io.IOException e) {
8446         throw new com.google.protobuf.InvalidProtocolBufferException(
8447             e.getMessage()).setUnfinishedMessage(this);
8448       } finally {
8449         this.unknownFields = unknownFields.build();
8450         makeExtensionsImmutable();
8451       }
8452     }
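    // Note on the tag dispatch above (general protobuf wire-format behavior, not
    // HBase-specific): a tag is (field_number << 3) | wire_type, so field 1 encoded
    // as a varint arrives as tag 8 (case 8 -> lenAsVal_), tag 0 marks end of input,
    // and any other tag is routed into the UnknownFieldSet via parseUnknownField.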
8453     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()8454         getDescriptor() {
8455       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor;
8456     }
8457 
8458     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()8459         internalGetFieldAccessorTable() {
8460       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable
8461           .ensureFieldAccessorsInitialized(
8462               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class);
8463     }
8464 
8465     public static com.google.protobuf.Parser<KeyOnlyFilter> PARSER =
8466         new com.google.protobuf.AbstractParser<KeyOnlyFilter>() {
8467       public KeyOnlyFilter parsePartialFrom(
8468           com.google.protobuf.CodedInputStream input,
8469           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8470           throws com.google.protobuf.InvalidProtocolBufferException {
8471         return new KeyOnlyFilter(input, extensionRegistry);
8472       }
8473     };
8474 
8475     @java.lang.Override
getParserForType()8476     public com.google.protobuf.Parser<KeyOnlyFilter> getParserForType() {
8477       return PARSER;
8478     }
8479 
8480     private int bitField0_;
8481     // required bool len_as_val = 1;
8482     public static final int LEN_AS_VAL_FIELD_NUMBER = 1;
8483     private boolean lenAsVal_;
8484     /**
8485      * <code>required bool len_as_val = 1;</code>
8486      */
hasLenAsVal()8487     public boolean hasLenAsVal() {
8488       return ((bitField0_ & 0x00000001) == 0x00000001);
8489     }
8490     /**
8491      * <code>required bool len_as_val = 1;</code>
8492      */
getLenAsVal()8493     public boolean getLenAsVal() {
8494       return lenAsVal_;
8495     }
8496 
initFields()8497     private void initFields() {
8498       lenAsVal_ = false;
8499     }
8500     private byte memoizedIsInitialized = -1;
isInitialized()8501     public final boolean isInitialized() {
8502       byte isInitialized = memoizedIsInitialized;
8503       if (isInitialized != -1) return isInitialized == 1;
8504 
8505       if (!hasLenAsVal()) {
8506         memoizedIsInitialized = 0;
8507         return false;
8508       }
8509       memoizedIsInitialized = 1;
8510       return true;
8511     }
8512 
writeTo(com.google.protobuf.CodedOutputStream output)8513     public void writeTo(com.google.protobuf.CodedOutputStream output)
8514                         throws java.io.IOException {
8515       getSerializedSize();
8516       if (((bitField0_ & 0x00000001) == 0x00000001)) {
8517         output.writeBool(1, lenAsVal_);
8518       }
8519       getUnknownFields().writeTo(output);
8520     }
8521 
8522     private int memoizedSerializedSize = -1;
getSerializedSize()8523     public int getSerializedSize() {
8524       int size = memoizedSerializedSize;
8525       if (size != -1) return size;
8526 
8527       size = 0;
8528       if (((bitField0_ & 0x00000001) == 0x00000001)) {
8529         size += com.google.protobuf.CodedOutputStream
8530           .computeBoolSize(1, lenAsVal_);
8531       }
8532       size += getUnknownFields().getSerializedSize();
8533       memoizedSerializedSize = size;
8534       return size;
8535     }
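    // Size note (illustrative arithmetic): when len_as_val is set, the message body
    // is 2 bytes, one byte for tag 8 plus one byte for the bool value, with any
    // unknown fields added on top; the result is cached in memoizedSerializedSize
    // after the first call.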
8536 
8537     private static final long serialVersionUID = 0L;
8538     @java.lang.Override
writeReplace()8539     protected java.lang.Object writeReplace()
8540         throws java.io.ObjectStreamException {
8541       return super.writeReplace();
8542     }
8543 
8544     @java.lang.Override
equals(final java.lang.Object obj)8545     public boolean equals(final java.lang.Object obj) {
8546       if (obj == this) {
8547        return true;
8548       }
8549       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)) {
8550         return super.equals(obj);
8551       }
8552       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj;
8553 
8554       boolean result = true;
8555       result = result && (hasLenAsVal() == other.hasLenAsVal());
8556       if (hasLenAsVal()) {
8557         result = result && (getLenAsVal()
8558             == other.getLenAsVal());
8559       }
8560       result = result &&
8561           getUnknownFields().equals(other.getUnknownFields());
8562       return result;
8563     }
8564 
8565     private int memoizedHashCode = 0;
8566     @java.lang.Override
hashCode()8567     public int hashCode() {
8568       if (memoizedHashCode != 0) {
8569         return memoizedHashCode;
8570       }
8571       int hash = 41;
8572       hash = (19 * hash) + getDescriptorForType().hashCode();
8573       if (hasLenAsVal()) {
8574         hash = (37 * hash) + LEN_AS_VAL_FIELD_NUMBER;
8575         hash = (53 * hash) + hashBoolean(getLenAsVal());
8576       }
8577       hash = (29 * hash) + getUnknownFields().hashCode();
8578       memoizedHashCode = hash;
8579       return hash;
8580     }
8581 
parseFrom( com.google.protobuf.ByteString data)8582     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
8583         com.google.protobuf.ByteString data)
8584         throws com.google.protobuf.InvalidProtocolBufferException {
8585       return PARSER.parseFrom(data);
8586     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8587     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
8588         com.google.protobuf.ByteString data,
8589         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8590         throws com.google.protobuf.InvalidProtocolBufferException {
8591       return PARSER.parseFrom(data, extensionRegistry);
8592     }
parseFrom(byte[] data)8593     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data)
8594         throws com.google.protobuf.InvalidProtocolBufferException {
8595       return PARSER.parseFrom(data);
8596     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8597     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
8598         byte[] data,
8599         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8600         throws com.google.protobuf.InvalidProtocolBufferException {
8601       return PARSER.parseFrom(data, extensionRegistry);
8602     }
parseFrom(java.io.InputStream input)8603     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input)
8604         throws java.io.IOException {
8605       return PARSER.parseFrom(input);
8606     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8607     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
8608         java.io.InputStream input,
8609         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8610         throws java.io.IOException {
8611       return PARSER.parseFrom(input, extensionRegistry);
8612     }
parseDelimitedFrom(java.io.InputStream input)8613     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input)
8614         throws java.io.IOException {
8615       return PARSER.parseDelimitedFrom(input);
8616     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8617     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(
8618         java.io.InputStream input,
8619         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8620         throws java.io.IOException {
8621       return PARSER.parseDelimitedFrom(input, extensionRegistry);
8622     }
parseFrom( com.google.protobuf.CodedInputStream input)8623     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
8624         com.google.protobuf.CodedInputStream input)
8625         throws java.io.IOException {
8626       return PARSER.parseFrom(input);
8627     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8628     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(
8629         com.google.protobuf.CodedInputStream input,
8630         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8631         throws java.io.IOException {
8632       return PARSER.parseFrom(input, extensionRegistry);
8633     }
8634 
newBuilder()8635     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()8636     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype)8637     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) {
8638       return newBuilder().mergeFrom(prototype);
8639     }
toBuilder()8640     public Builder toBuilder() { return newBuilder(this); }
8641 
8642     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8643     protected Builder newBuilderForType(
8644         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8645       Builder builder = new Builder(parent);
8646       return builder;
8647     }
8648     /**
8649      * Protobuf type {@code KeyOnlyFilter}
8650      */
8651     public static final class Builder extends
8652         com.google.protobuf.GeneratedMessage.Builder<Builder>
8653        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder {
8654       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()8655           getDescriptor() {
8656         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor;
8657       }
8658 
8659       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()8660           internalGetFieldAccessorTable() {
8661         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable
8662             .ensureFieldAccessorsInitialized(
8663                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class);
8664       }
8665 
8666       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder()
Builder()8667       private Builder() {
8668         maybeForceBuilderInitialization();
8669       }
8670 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8671       private Builder(
8672           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8673         super(parent);
8674         maybeForceBuilderInitialization();
8675       }
maybeForceBuilderInitialization()8676       private void maybeForceBuilderInitialization() {
8677         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8678         }
8679       }
create()8680       private static Builder create() {
8681         return new Builder();
8682       }
8683 
clear()8684       public Builder clear() {
8685         super.clear();
8686         lenAsVal_ = false;
8687         bitField0_ = (bitField0_ & ~0x00000001);
8688         return this;
8689       }
8690 
clone()8691       public Builder clone() {
8692         return create().mergeFrom(buildPartial());
8693       }
8694 
8695       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()8696           getDescriptorForType() {
8697         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor;
8698       }
8699 
getDefaultInstanceForType()8700       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() {
8701         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance();
8702       }
8703 
build()8704       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() {
8705         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial();
8706         if (!result.isInitialized()) {
8707           throw newUninitializedMessageException(result);
8708         }
8709         return result;
8710       }
8711 
buildPartial()8712       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() {
8713         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this);
8714         int from_bitField0_ = bitField0_;
8715         int to_bitField0_ = 0;
8716         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8717           to_bitField0_ |= 0x00000001;
8718         }
8719         result.lenAsVal_ = lenAsVal_;
8720         result.bitField0_ = to_bitField0_;
8721         onBuilt();
8722         return result;
8723       }
8724 
mergeFrom(com.google.protobuf.Message other)8725       public Builder mergeFrom(com.google.protobuf.Message other) {
8726         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) {
8727           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other);
8728         } else {
8729           super.mergeFrom(other);
8730           return this;
8731         }
8732       }
8733 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other)8734       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) {
8735         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this;
8736         if (other.hasLenAsVal()) {
8737           setLenAsVal(other.getLenAsVal());
8738         }
8739         this.mergeUnknownFields(other.getUnknownFields());
8740         return this;
8741       }
8742 
isInitialized()8743       public final boolean isInitialized() {
8744         if (!hasLenAsVal()) {
8745 
8746           return false;
8747         }
8748         return true;
8749       }
8750 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8751       public Builder mergeFrom(
8752           com.google.protobuf.CodedInputStream input,
8753           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8754           throws java.io.IOException {
8755         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parsedMessage = null;
8756         try {
8757           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8758         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8759           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) e.getUnfinishedMessage();
8760           throw e;
8761         } finally {
8762           if (parsedMessage != null) {
8763             mergeFrom(parsedMessage);
8764           }
8765         }
8766         return this;
8767       }
8768       private int bitField0_;
8769 
8770       // required bool len_as_val = 1;
8771       private boolean lenAsVal_ ;
8772       /**
8773        * <code>required bool len_as_val = 1;</code>
8774        */
hasLenAsVal()8775       public boolean hasLenAsVal() {
8776         return ((bitField0_ & 0x00000001) == 0x00000001);
8777       }
8778       /**
8779        * <code>required bool len_as_val = 1;</code>
8780        */
getLenAsVal()8781       public boolean getLenAsVal() {
8782         return lenAsVal_;
8783       }
8784       /**
8785        * <code>required bool len_as_val = 1;</code>
8786        */
setLenAsVal(boolean value)8787       public Builder setLenAsVal(boolean value) {
8788         bitField0_ |= 0x00000001;
8789         lenAsVal_ = value;
8790         onChanged();
8791         return this;
8792       }
8793       /**
8794        * <code>required bool len_as_val = 1;</code>
8795        */
clearLenAsVal()8796       public Builder clearLenAsVal() {
8797         bitField0_ = (bitField0_ & ~0x00000001);
8798         lenAsVal_ = false;
8799         onChanged();
8800         return this;
8801       }
8802 
8803       // @@protoc_insertion_point(builder_scope:KeyOnlyFilter)
8804     }
8805 
8806     static {
8807       defaultInstance = new KeyOnlyFilter(true);
defaultInstance.initFields()8808       defaultInstance.initFields();
8809     }
8810 
8811     // @@protoc_insertion_point(class_scope:KeyOnlyFilter)
8812   }
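
  // Illustrative usage sketch (assumes only the classes in this file plus the
  // protobuf 2.5 runtime): len_as_val is a required field, so build() throws an
  // UninitializedMessageException if it was never set on the Builder.
  //
  //   FilterProtos.KeyOnlyFilter keyOnly = FilterProtos.KeyOnlyFilter.newBuilder()
  //       .setLenAsVal(true)
  //       .build();
  //   byte[] wire = keyOnly.toByteArray();
  //   boolean lenAsVal = FilterProtos.KeyOnlyFilter.parseFrom(wire).getLenAsVal();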
8813 
8814   public interface MultipleColumnPrefixFilterOrBuilder
8815       extends com.google.protobuf.MessageOrBuilder {
8816 
8817     // repeated bytes sorted_prefixes = 1;
8818     /**
8819      * <code>repeated bytes sorted_prefixes = 1;</code>
8820      */
getSortedPrefixesList()8821     java.util.List<com.google.protobuf.ByteString> getSortedPrefixesList();
8822     /**
8823      * <code>repeated bytes sorted_prefixes = 1;</code>
8824      */
getSortedPrefixesCount()8825     int getSortedPrefixesCount();
8826     /**
8827      * <code>repeated bytes sorted_prefixes = 1;</code>
8828      */
getSortedPrefixes(int index)8829     com.google.protobuf.ByteString getSortedPrefixes(int index);
8830   }
8831   /**
8832    * Protobuf type {@code MultipleColumnPrefixFilter}
8833    */
8834   public static final class MultipleColumnPrefixFilter extends
8835       com.google.protobuf.GeneratedMessage
8836       implements MultipleColumnPrefixFilterOrBuilder {
8837     // Use MultipleColumnPrefixFilter.newBuilder() to construct.
MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)8838     private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8839       super(builder);
8840       this.unknownFields = builder.getUnknownFields();
8841     }
MultipleColumnPrefixFilter(boolean noInit)8842     private MultipleColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8843 
8844     private static final MultipleColumnPrefixFilter defaultInstance;
getDefaultInstance()8845     public static MultipleColumnPrefixFilter getDefaultInstance() {
8846       return defaultInstance;
8847     }
8848 
getDefaultInstanceForType()8849     public MultipleColumnPrefixFilter getDefaultInstanceForType() {
8850       return defaultInstance;
8851     }
8852 
8853     private final com.google.protobuf.UnknownFieldSet unknownFields;
8854     @java.lang.Override
8855     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()8856         getUnknownFields() {
8857       return this.unknownFields;
8858     }
MultipleColumnPrefixFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8859     private MultipleColumnPrefixFilter(
8860         com.google.protobuf.CodedInputStream input,
8861         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8862         throws com.google.protobuf.InvalidProtocolBufferException {
8863       initFields();
8864       int mutable_bitField0_ = 0;
8865       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8866           com.google.protobuf.UnknownFieldSet.newBuilder();
8867       try {
8868         boolean done = false;
8869         while (!done) {
8870           int tag = input.readTag();
8871           switch (tag) {
8872             case 0:
8873               done = true;
8874               break;
8875             default: {
8876               if (!parseUnknownField(input, unknownFields,
8877                                      extensionRegistry, tag)) {
8878                 done = true;
8879               }
8880               break;
8881             }
8882             case 10: {
8883               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
8884                 sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
8885                 mutable_bitField0_ |= 0x00000001;
8886               }
8887               sortedPrefixes_.add(input.readBytes());
8888               break;
8889             }
8890           }
8891         }
8892       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8893         throw e.setUnfinishedMessage(this);
8894       } catch (java.io.IOException e) {
8895         throw new com.google.protobuf.InvalidProtocolBufferException(
8896             e.getMessage()).setUnfinishedMessage(this);
8897       } finally {
8898         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
8899           sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
8900         }
8901         this.unknownFields = unknownFields.build();
8902         makeExtensionsImmutable();
8903       }
8904     }
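    // Parsing note: each tag-10 record (field 1, length-delimited) appends one
    // ByteString to a lazily created ArrayList guarded by mutable_bitField0_; the
    // finally block then freezes that list with Collections.unmodifiableList, so the
    // parsed message stays immutable even when parsing ends in an exception.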
8905     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()8906         getDescriptor() {
8907       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor;
8908     }
8909 
8910     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()8911         internalGetFieldAccessorTable() {
8912       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable
8913           .ensureFieldAccessorsInitialized(
8914               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
8915     }
8916 
8917     public static com.google.protobuf.Parser<MultipleColumnPrefixFilter> PARSER =
8918         new com.google.protobuf.AbstractParser<MultipleColumnPrefixFilter>() {
8919       public MultipleColumnPrefixFilter parsePartialFrom(
8920           com.google.protobuf.CodedInputStream input,
8921           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8922           throws com.google.protobuf.InvalidProtocolBufferException {
8923         return new MultipleColumnPrefixFilter(input, extensionRegistry);
8924       }
8925     };
8926 
8927     @java.lang.Override
getParserForType()8928     public com.google.protobuf.Parser<MultipleColumnPrefixFilter> getParserForType() {
8929       return PARSER;
8930     }
8931 
8932     // repeated bytes sorted_prefixes = 1;
8933     public static final int SORTED_PREFIXES_FIELD_NUMBER = 1;
8934     private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_;
8935     /**
8936      * <code>repeated bytes sorted_prefixes = 1;</code>
8937      */
8938     public java.util.List<com.google.protobuf.ByteString>
getSortedPrefixesList()8939         getSortedPrefixesList() {
8940       return sortedPrefixes_;
8941     }
8942     /**
8943      * <code>repeated bytes sorted_prefixes = 1;</code>
8944      */
getSortedPrefixesCount()8945     public int getSortedPrefixesCount() {
8946       return sortedPrefixes_.size();
8947     }
8948     /**
8949      * <code>repeated bytes sorted_prefixes = 1;</code>
8950      */
getSortedPrefixes(int index)8951     public com.google.protobuf.ByteString getSortedPrefixes(int index) {
8952       return sortedPrefixes_.get(index);
8953     }
8954 
initFields()8955     private void initFields() {
8956       sortedPrefixes_ = java.util.Collections.emptyList();
8957     }
8958     private byte memoizedIsInitialized = -1;
isInitialized()8959     public final boolean isInitialized() {
8960       byte isInitialized = memoizedIsInitialized;
8961       if (isInitialized != -1) return isInitialized == 1;
8962 
8963       memoizedIsInitialized = 1;
8964       return true;
8965     }
8966 
writeTo(com.google.protobuf.CodedOutputStream output)8967     public void writeTo(com.google.protobuf.CodedOutputStream output)
8968                         throws java.io.IOException {
8969       getSerializedSize();
8970       for (int i = 0; i < sortedPrefixes_.size(); i++) {
8971         output.writeBytes(1, sortedPrefixes_.get(i));
8972       }
8973       getUnknownFields().writeTo(output);
8974     }
8975 
8976     private int memoizedSerializedSize = -1;
getSerializedSize()8977     public int getSerializedSize() {
8978       int size = memoizedSerializedSize;
8979       if (size != -1) return size;
8980 
8981       size = 0;
8982       {
8983         int dataSize = 0;
8984         for (int i = 0; i < sortedPrefixes_.size(); i++) {
8985           dataSize += com.google.protobuf.CodedOutputStream
8986             .computeBytesSizeNoTag(sortedPrefixes_.get(i));
8987         }
8988         size += dataSize;
8989         size += 1 * getSortedPrefixesList().size();
8990       }
8991       size += getUnknownFields().getSerializedSize();
8992       memoizedSerializedSize = size;
8993       return size;
8994     }
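    // Size note (illustrative arithmetic): each prefix costs one tag byte plus a
    // varint length prefix plus the data itself. For two prefixes "ab" and "c" that
    // is (1 + 1 + 2) + (1 + 1 + 1) = 7 bytes, before any unknown fields.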
8995 
8996     private static final long serialVersionUID = 0L;
8997     @java.lang.Override
writeReplace()8998     protected java.lang.Object writeReplace()
8999         throws java.io.ObjectStreamException {
9000       return super.writeReplace();
9001     }
9002 
9003     @java.lang.Override
equals(final java.lang.Object obj)9004     public boolean equals(final java.lang.Object obj) {
9005       if (obj == this) {
9006        return true;
9007       }
9008       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)) {
9009         return super.equals(obj);
9010       }
9011       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj;
9012 
9013       boolean result = true;
9014       result = result && getSortedPrefixesList()
9015           .equals(other.getSortedPrefixesList());
9016       result = result &&
9017           getUnknownFields().equals(other.getUnknownFields());
9018       return result;
9019     }
9020 
9021     private int memoizedHashCode = 0;
9022     @java.lang.Override
hashCode()9023     public int hashCode() {
9024       if (memoizedHashCode != 0) {
9025         return memoizedHashCode;
9026       }
9027       int hash = 41;
9028       hash = (19 * hash) + getDescriptorForType().hashCode();
9029       if (getSortedPrefixesCount() > 0) {
9030         hash = (37 * hash) + SORTED_PREFIXES_FIELD_NUMBER;
9031         hash = (53 * hash) + getSortedPrefixesList().hashCode();
9032       }
9033       hash = (29 * hash) + getUnknownFields().hashCode();
9034       memoizedHashCode = hash;
9035       return hash;
9036     }
9037 
parseFrom( com.google.protobuf.ByteString data)9038     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
9039         com.google.protobuf.ByteString data)
9040         throws com.google.protobuf.InvalidProtocolBufferException {
9041       return PARSER.parseFrom(data);
9042     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9043     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
9044         com.google.protobuf.ByteString data,
9045         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9046         throws com.google.protobuf.InvalidProtocolBufferException {
9047       return PARSER.parseFrom(data, extensionRegistry);
9048     }
parseFrom(byte[] data)9049     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data)
9050         throws com.google.protobuf.InvalidProtocolBufferException {
9051       return PARSER.parseFrom(data);
9052     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9053     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
9054         byte[] data,
9055         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9056         throws com.google.protobuf.InvalidProtocolBufferException {
9057       return PARSER.parseFrom(data, extensionRegistry);
9058     }
parseFrom(java.io.InputStream input)9059     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input)
9060         throws java.io.IOException {
9061       return PARSER.parseFrom(input);
9062     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9063     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
9064         java.io.InputStream input,
9065         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9066         throws java.io.IOException {
9067       return PARSER.parseFrom(input, extensionRegistry);
9068     }
parseDelimitedFrom(java.io.InputStream input)9069     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input)
9070         throws java.io.IOException {
9071       return PARSER.parseDelimitedFrom(input);
9072     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9073     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseDelimitedFrom(
9074         java.io.InputStream input,
9075         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9076         throws java.io.IOException {
9077       return PARSER.parseDelimitedFrom(input, extensionRegistry);
9078     }
parseFrom( com.google.protobuf.CodedInputStream input)9079     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
9080         com.google.protobuf.CodedInputStream input)
9081         throws java.io.IOException {
9082       return PARSER.parseFrom(input);
9083     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9084     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
9085         com.google.protobuf.CodedInputStream input,
9086         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9087         throws java.io.IOException {
9088       return PARSER.parseFrom(input, extensionRegistry);
9089     }
9090 
newBuilder()9091     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()9092     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype)9093     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter prototype) {
9094       return newBuilder().mergeFrom(prototype);
9095     }
toBuilder()9096     public Builder toBuilder() { return newBuilder(this); }
9097 
9098     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9099     protected Builder newBuilderForType(
9100         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9101       Builder builder = new Builder(parent);
9102       return builder;
9103     }
9104     /**
9105      * Protobuf type {@code MultipleColumnPrefixFilter}
9106      */
9107     public static final class Builder extends
9108         com.google.protobuf.GeneratedMessage.Builder<Builder>
9109        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilterOrBuilder {
9110       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()9111           getDescriptor() {
9112         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor;
9113       }
9114 
9115       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()9116           internalGetFieldAccessorTable() {
9117         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable
9118             .ensureFieldAccessorsInitialized(
9119                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class);
9120       }
9121 
9122       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.newBuilder()
Builder()9123       private Builder() {
9124         maybeForceBuilderInitialization();
9125       }
9126 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)9127       private Builder(
9128           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9129         super(parent);
9130         maybeForceBuilderInitialization();
9131       }
maybeForceBuilderInitialization()9132       private void maybeForceBuilderInitialization() {
9133         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9134         }
9135       }
create()9136       private static Builder create() {
9137         return new Builder();
9138       }
9139 
clear()9140       public Builder clear() {
9141         super.clear();
9142         sortedPrefixes_ = java.util.Collections.emptyList();
9143         bitField0_ = (bitField0_ & ~0x00000001);
9144         return this;
9145       }
9146 
clone()9147       public Builder clone() {
9148         return create().mergeFrom(buildPartial());
9149       }
9150 
9151       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()9152           getDescriptorForType() {
9153         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor;
9154       }
9155 
getDefaultInstanceForType()9156       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter getDefaultInstanceForType() {
9157         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance();
9158       }
9159 
build()9160       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter build() {
9161         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = buildPartial();
9162         if (!result.isInitialized()) {
9163           throw newUninitializedMessageException(result);
9164         }
9165         return result;
9166       }
9167 
buildPartial()9168       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter buildPartial() {
9169         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter(this);
9170         int from_bitField0_ = bitField0_;
9171         if (((bitField0_ & 0x00000001) == 0x00000001)) {
9172           sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_);
9173           bitField0_ = (bitField0_ & ~0x00000001);
9174         }
9175         result.sortedPrefixes_ = sortedPrefixes_;
9176         onBuilt();
9177         return result;
9178       }
9179 
mergeFrom(com.google.protobuf.Message other)9180       public Builder mergeFrom(com.google.protobuf.Message other) {
9181         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) {
9182           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)other);
9183         } else {
9184           super.mergeFrom(other);
9185           return this;
9186         }
9187       }
9188 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other)9189       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other) {
9190         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.getDefaultInstance()) return this;
9191         if (!other.sortedPrefixes_.isEmpty()) {
9192           if (sortedPrefixes_.isEmpty()) {
9193             sortedPrefixes_ = other.sortedPrefixes_;
9194             bitField0_ = (bitField0_ & ~0x00000001);
9195           } else {
9196             ensureSortedPrefixesIsMutable();
9197             sortedPrefixes_.addAll(other.sortedPrefixes_);
9198           }
9199           onChanged();
9200         }
9201         this.mergeUnknownFields(other.getUnknownFields());
9202         return this;
9203       }
9204 
isInitialized()9205       public final boolean isInitialized() {
9206         return true;
9207       }
9208 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9209       public Builder mergeFrom(
9210           com.google.protobuf.CodedInputStream input,
9211           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9212           throws java.io.IOException {
9213         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parsedMessage = null;
9214         try {
9215           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9216         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9217           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) e.getUnfinishedMessage();
9218           throw e;
9219         } finally {
9220           if (parsedMessage != null) {
9221             mergeFrom(parsedMessage);
9222           }
9223         }
9224         return this;
9225       }
9226       private int bitField0_;
9227 
9228       // repeated bytes sorted_prefixes = 1;
9229       private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_ = java.util.Collections.emptyList();
ensureSortedPrefixesIsMutable()9230       private void ensureSortedPrefixesIsMutable() {
9231         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
9232           sortedPrefixes_ = new java.util.ArrayList<com.google.protobuf.ByteString>(sortedPrefixes_);
9233           bitField0_ |= 0x00000001;
9234          }
9235       }
9236       /**
9237        * <code>repeated bytes sorted_prefixes = 1;</code>
9238        */
9239       public java.util.List<com.google.protobuf.ByteString>
getSortedPrefixesList()9240           getSortedPrefixesList() {
9241         return java.util.Collections.unmodifiableList(sortedPrefixes_);
9242       }
9243       /**
9244        * <code>repeated bytes sorted_prefixes = 1;</code>
9245        */
getSortedPrefixesCount()9246       public int getSortedPrefixesCount() {
9247         return sortedPrefixes_.size();
9248       }
9249       /**
9250        * <code>repeated bytes sorted_prefixes = 1;</code>
9251        */
getSortedPrefixes(int index)9252       public com.google.protobuf.ByteString getSortedPrefixes(int index) {
9253         return sortedPrefixes_.get(index);
9254       }
9255       /**
9256        * <code>repeated bytes sorted_prefixes = 1;</code>
9257        */
setSortedPrefixes( int index, com.google.protobuf.ByteString value)9258       public Builder setSortedPrefixes(
9259           int index, com.google.protobuf.ByteString value) {
9260         if (value == null) {
9261           throw new NullPointerException();
9262         }
9263         ensureSortedPrefixesIsMutable();
9264         sortedPrefixes_.set(index, value);
9265         onChanged();
9266         return this;
9267       }
9268       /**
9269        * <code>repeated bytes sorted_prefixes = 1;</code>
9270        */
addSortedPrefixes(com.google.protobuf.ByteString value)9271       public Builder addSortedPrefixes(com.google.protobuf.ByteString value) {
9272         if (value == null) {
9273           throw new NullPointerException();
9274         }
9275         ensureSortedPrefixesIsMutable();
9276         sortedPrefixes_.add(value);
9277         onChanged();
9278         return this;
9279       }
9280       /**
9281        * <code>repeated bytes sorted_prefixes = 1;</code>
9282        */
addAllSortedPrefixes( java.lang.Iterable<? extends com.google.protobuf.ByteString> values)9283       public Builder addAllSortedPrefixes(
9284           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
9285         ensureSortedPrefixesIsMutable();
9286         super.addAll(values, sortedPrefixes_);
9287         onChanged();
9288         return this;
9289       }
9290       /**
9291        * <code>repeated bytes sorted_prefixes = 1;</code>
9292        */
clearSortedPrefixes()9293       public Builder clearSortedPrefixes() {
9294         sortedPrefixes_ = java.util.Collections.emptyList();
9295         bitField0_ = (bitField0_ & ~0x00000001);
9296         onChanged();
9297         return this;
9298       }
9299 
9300       // @@protoc_insertion_point(builder_scope:MultipleColumnPrefixFilter)
9301     }
9302 
9303     static {
9304       defaultInstance = new MultipleColumnPrefixFilter(true);
defaultInstance.initFields()9305       defaultInstance.initFields();
9306     }
9307 
9308     // @@protoc_insertion_point(class_scope:MultipleColumnPrefixFilter)
9309   }
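
  // Illustrative usage sketch (prefix values are made up): the builder simply
  // appends in call order and does not sort; the field name suggests callers are
  // expected to supply the prefixes already in sorted order.
  //
  //   FilterProtos.MultipleColumnPrefixFilter prefixes =
  //       FilterProtos.MultipleColumnPrefixFilter.newBuilder()
  //           .addSortedPrefixes(com.google.protobuf.ByteString.copyFromUtf8("col_a"))
  //           .addAllSortedPrefixes(java.util.Arrays.asList(
  //               com.google.protobuf.ByteString.copyFromUtf8("col_b"),
  //               com.google.protobuf.ByteString.copyFromUtf8("col_c")))
  //           .build();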
9310 
9311   public interface PageFilterOrBuilder
9312       extends com.google.protobuf.MessageOrBuilder {
9313 
9314     // required int64 page_size = 1;
9315     /**
9316      * <code>required int64 page_size = 1;</code>
9317      */
hasPageSize()9318     boolean hasPageSize();
9319     /**
9320      * <code>required int64 page_size = 1;</code>
9321      */
getPageSize()9322     long getPageSize();
9323   }
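
  // Illustrative usage sketch: page_size is the message's only (required) field, so
  // a minimal instance is a single setter call; setPageSize is the setter the
  // generated Builder is expected to expose for the required int64 field.
  //
  //   FilterProtos.PageFilter page = FilterProtos.PageFilter.newBuilder()
  //       .setPageSize(100L)
  //       .build();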
9324   /**
9325    * Protobuf type {@code PageFilter}
9326    */
9327   public static final class PageFilter extends
9328       com.google.protobuf.GeneratedMessage
9329       implements PageFilterOrBuilder {
9330     // Use PageFilter.newBuilder() to construct.
PageFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)9331     private PageFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
9332       super(builder);
9333       this.unknownFields = builder.getUnknownFields();
9334     }
PageFilter(boolean noInit)9335     private PageFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9336 
9337     private static final PageFilter defaultInstance;
getDefaultInstance()9338     public static PageFilter getDefaultInstance() {
9339       return defaultInstance;
9340     }
9341 
getDefaultInstanceForType()9342     public PageFilter getDefaultInstanceForType() {
9343       return defaultInstance;
9344     }
9345 
9346     private final com.google.protobuf.UnknownFieldSet unknownFields;
9347     @java.lang.Override
9348     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()9349         getUnknownFields() {
9350       return this.unknownFields;
9351     }
PageFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9352     private PageFilter(
9353         com.google.protobuf.CodedInputStream input,
9354         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9355         throws com.google.protobuf.InvalidProtocolBufferException {
9356       initFields();
9357       int mutable_bitField0_ = 0;
9358       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
9359           com.google.protobuf.UnknownFieldSet.newBuilder();
9360       try {
9361         boolean done = false;
9362         while (!done) {
9363           int tag = input.readTag();
9364           switch (tag) {
9365             case 0:
9366               done = true;
9367               break;
9368             default: {
9369               if (!parseUnknownField(input, unknownFields,
9370                                      extensionRegistry, tag)) {
9371                 done = true;
9372               }
9373               break;
9374             }
9375             case 8: {
9376               bitField0_ |= 0x00000001;
9377               pageSize_ = input.readInt64();
9378               break;
9379             }
9380           }
9381         }
9382       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9383         throw e.setUnfinishedMessage(this);
9384       } catch (java.io.IOException e) {
9385         throw new com.google.protobuf.InvalidProtocolBufferException(
9386             e.getMessage()).setUnfinishedMessage(this);
9387       } finally {
9388         this.unknownFields = unknownFields.build();
9389         makeExtensionsImmutable();
9390       }
9391     }
9392     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()9393         getDescriptor() {
9394       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor;
9395     }
9396 
9397     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()9398         internalGetFieldAccessorTable() {
9399       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable
9400           .ensureFieldAccessorsInitialized(
9401               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
9402     }
9403 
9404     public static com.google.protobuf.Parser<PageFilter> PARSER =
9405         new com.google.protobuf.AbstractParser<PageFilter>() {
9406       public PageFilter parsePartialFrom(
9407           com.google.protobuf.CodedInputStream input,
9408           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9409           throws com.google.protobuf.InvalidProtocolBufferException {
9410         return new PageFilter(input, extensionRegistry);
9411       }
9412     };
9413 
9414     @java.lang.Override
getParserForType()9415     public com.google.protobuf.Parser<PageFilter> getParserForType() {
9416       return PARSER;
9417     }
9418 
9419     private int bitField0_;
9420     // required int64 page_size = 1;
9421     public static final int PAGE_SIZE_FIELD_NUMBER = 1;
9422     private long pageSize_;
9423     /**
9424      * <code>required int64 page_size = 1;</code>
9425      */
hasPageSize()9426     public boolean hasPageSize() {
9427       return ((bitField0_ & 0x00000001) == 0x00000001);
9428     }
9429     /**
9430      * <code>required int64 page_size = 1;</code>
9431      */
getPageSize()9432     public long getPageSize() {
9433       return pageSize_;
9434     }
9435 
initFields()9436     private void initFields() {
9437       pageSize_ = 0L;
9438     }
9439     private byte memoizedIsInitialized = -1;
isInitialized()9440     public final boolean isInitialized() {
9441       byte isInitialized = memoizedIsInitialized;
9442       if (isInitialized != -1) return isInitialized == 1;
9443 
9444       if (!hasPageSize()) {
9445         memoizedIsInitialized = 0;
9446         return false;
9447       }
9448       memoizedIsInitialized = 1;
9449       return true;
9450     }
9451 
writeTo(com.google.protobuf.CodedOutputStream output)9452     public void writeTo(com.google.protobuf.CodedOutputStream output)
9453                         throws java.io.IOException {
9454       getSerializedSize();
9455       if (((bitField0_ & 0x00000001) == 0x00000001)) {
9456         output.writeInt64(1, pageSize_);
9457       }
9458       getUnknownFields().writeTo(output);
9459     }
9460 
9461     private int memoizedSerializedSize = -1;
getSerializedSize()9462     public int getSerializedSize() {
9463       int size = memoizedSerializedSize;
9464       if (size != -1) return size;
9465 
9466       size = 0;
9467       if (((bitField0_ & 0x00000001) == 0x00000001)) {
9468         size += com.google.protobuf.CodedOutputStream
9469           .computeInt64Size(1, pageSize_);
9470       }
9471       size += getUnknownFields().getSerializedSize();
9472       memoizedSerializedSize = size;
9473       return size;
9474     }
9475 
9476     private static final long serialVersionUID = 0L;
9477     @java.lang.Override
writeReplace()9478     protected java.lang.Object writeReplace()
9479         throws java.io.ObjectStreamException {
9480       return super.writeReplace();
9481     }
9482 
9483     @java.lang.Override
equals(final java.lang.Object obj)9484     public boolean equals(final java.lang.Object obj) {
9485       if (obj == this) {
9486        return true;
9487       }
9488       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)) {
9489         return super.equals(obj);
9490       }
9491       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) obj;
9492 
9493       boolean result = true;
9494       result = result && (hasPageSize() == other.hasPageSize());
9495       if (hasPageSize()) {
9496         result = result && (getPageSize()
9497             == other.getPageSize());
9498       }
9499       result = result &&
9500           getUnknownFields().equals(other.getUnknownFields());
9501       return result;
9502     }
9503 
9504     private int memoizedHashCode = 0;
9505     @java.lang.Override
hashCode()9506     public int hashCode() {
9507       if (memoizedHashCode != 0) {
9508         return memoizedHashCode;
9509       }
9510       int hash = 41;
9511       hash = (19 * hash) + getDescriptorForType().hashCode();
9512       if (hasPageSize()) {
9513         hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
9514         hash = (53 * hash) + hashLong(getPageSize());
9515       }
9516       hash = (29 * hash) + getUnknownFields().hashCode();
9517       memoizedHashCode = hash;
9518       return hash;
9519     }
9520 
parseFrom( com.google.protobuf.ByteString data)9521     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
9522         com.google.protobuf.ByteString data)
9523         throws com.google.protobuf.InvalidProtocolBufferException {
9524       return PARSER.parseFrom(data);
9525     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9526     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
9527         com.google.protobuf.ByteString data,
9528         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9529         throws com.google.protobuf.InvalidProtocolBufferException {
9530       return PARSER.parseFrom(data, extensionRegistry);
9531     }
parseFrom(byte[] data)9532     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(byte[] data)
9533         throws com.google.protobuf.InvalidProtocolBufferException {
9534       return PARSER.parseFrom(data);
9535     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9536     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
9537         byte[] data,
9538         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9539         throws com.google.protobuf.InvalidProtocolBufferException {
9540       return PARSER.parseFrom(data, extensionRegistry);
9541     }
parseFrom(java.io.InputStream input)9542     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(java.io.InputStream input)
9543         throws java.io.IOException {
9544       return PARSER.parseFrom(input);
9545     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9546     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
9547         java.io.InputStream input,
9548         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9549         throws java.io.IOException {
9550       return PARSER.parseFrom(input, extensionRegistry);
9551     }
parseDelimitedFrom(java.io.InputStream input)9552     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(java.io.InputStream input)
9553         throws java.io.IOException {
9554       return PARSER.parseDelimitedFrom(input);
9555     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9556     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseDelimitedFrom(
9557         java.io.InputStream input,
9558         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9559         throws java.io.IOException {
9560       return PARSER.parseDelimitedFrom(input, extensionRegistry);
9561     }
parseFrom( com.google.protobuf.CodedInputStream input)9562     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
9563         com.google.protobuf.CodedInputStream input)
9564         throws java.io.IOException {
9565       return PARSER.parseFrom(input);
9566     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9567     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parseFrom(
9568         com.google.protobuf.CodedInputStream input,
9569         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9570         throws java.io.IOException {
9571       return PARSER.parseFrom(input, extensionRegistry);
9572     }
9573 
newBuilder()9574     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()9575     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype)9576     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter prototype) {
9577       return newBuilder().mergeFrom(prototype);
9578     }
toBuilder()9579     public Builder toBuilder() { return newBuilder(this); }
9580 
9581     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9582     protected Builder newBuilderForType(
9583         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9584       Builder builder = new Builder(parent);
9585       return builder;
9586     }
9587     /**
9588      * Protobuf type {@code PageFilter}
9589      */
9590     public static final class Builder extends
9591         com.google.protobuf.GeneratedMessage.Builder<Builder>
9592        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilterOrBuilder {
9593       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()9594           getDescriptor() {
9595         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor;
9596       }
9597 
9598       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()9599           internalGetFieldAccessorTable() {
9600         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_fieldAccessorTable
9601             .ensureFieldAccessorsInitialized(
9602                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.Builder.class);
9603       }
9604 
9605       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.newBuilder()
9606       private Builder() {
9607         maybeForceBuilderInitialization();
9608       }
9609 
9610       private Builder(
9611           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9612         super(parent);
9613         maybeForceBuilderInitialization();
9614       }
9615       private void maybeForceBuilderInitialization() {
9616         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9617         }
9618       }
9619       private static Builder create() {
9620         return new Builder();
9621       }
9622 
9623       public Builder clear() {
9624         super.clear();
9625         pageSize_ = 0L;
9626         bitField0_ = (bitField0_ & ~0x00000001);
9627         return this;
9628       }
9629 
9630       public Builder clone() {
9631         return create().mergeFrom(buildPartial());
9632       }
9633 
9634       public com.google.protobuf.Descriptors.Descriptor
9635           getDescriptorForType() {
9636         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PageFilter_descriptor;
9637       }
9638 
9639       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter getDefaultInstanceForType() {
9640         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance();
9641       }
9642 
9643       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter build() {
9644         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = buildPartial();
9645         if (!result.isInitialized()) {
9646           throw newUninitializedMessageException(result);
9647         }
9648         return result;
9649       }
9650 
9651       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter buildPartial() {
9652         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter(this);
9653         int from_bitField0_ = bitField0_;
9654         int to_bitField0_ = 0;
9655         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9656           to_bitField0_ |= 0x00000001;
9657         }
9658         result.pageSize_ = pageSize_;
9659         result.bitField0_ = to_bitField0_;
9660         onBuilt();
9661         return result;
9662       }
9663 
9664       public Builder mergeFrom(com.google.protobuf.Message other) {
9665         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) {
9666           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter)other);
9667         } else {
9668           super.mergeFrom(other);
9669           return this;
9670         }
9671       }
9672 
9673       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter other) {
9674         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter.getDefaultInstance()) return this;
9675         if (other.hasPageSize()) {
9676           setPageSize(other.getPageSize());
9677         }
9678         this.mergeUnknownFields(other.getUnknownFields());
9679         return this;
9680       }
9681 
9682       public final boolean isInitialized() {
9683         if (!hasPageSize()) {
9684 
9685           return false;
9686         }
9687         return true;
9688       }
9689 
9690       public Builder mergeFrom(
9691           com.google.protobuf.CodedInputStream input,
9692           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9693           throws java.io.IOException {
9694         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter parsedMessage = null;
9695         try {
9696           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9697         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9698           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PageFilter) e.getUnfinishedMessage();
9699           throw e;
9700         } finally {
9701           if (parsedMessage != null) {
9702             mergeFrom(parsedMessage);
9703           }
9704         }
9705         return this;
9706       }
9707       private int bitField0_;
9708 
9709       // required int64 page_size = 1;
9710       private long pageSize_ ;
9711       /**
9712        * <code>required int64 page_size = 1;</code>
9713        */
9714       public boolean hasPageSize() {
9715         return ((bitField0_ & 0x00000001) == 0x00000001);
9716       }
9717       /**
9718        * <code>required int64 page_size = 1;</code>
9719        */
9720       public long getPageSize() {
9721         return pageSize_;
9722       }
9723       /**
9724        * <code>required int64 page_size = 1;</code>
9725        */
9726       public Builder setPageSize(long value) {
9727         bitField0_ |= 0x00000001;
9728         pageSize_ = value;
9729         onChanged();
9730         return this;
9731       }
9732       /**
9733        * <code>required int64 page_size = 1;</code>
9734        */
9735       public Builder clearPageSize() {
9736         bitField0_ = (bitField0_ & ~0x00000001);
9737         pageSize_ = 0L;
9738         onChanged();
9739         return this;
9740       }
9741 
9742       // @@protoc_insertion_point(builder_scope:PageFilter)
9743     }
9744 
9745     static {
9746       defaultInstance = new PageFilter(true);
9747       defaultInstance.initFields();
9748     }
9749 
9750     // @@protoc_insertion_point(class_scope:PageFilter)
9751   }
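  // Illustrative sketch, not part of the generated file: a minimal round trip for the
  // PageFilter message above, assuming the standard protobuf 2.x runtime on the classpath.
  // The helper name pageFilterRoundTripSketch is hypothetical.
  static PageFilter pageFilterRoundTripSketch()
      throws com.google.protobuf.InvalidProtocolBufferException {
    PageFilter filter = PageFilter.newBuilder()
        .setPageSize(25L)                 // required int64 page_size = 1; build() throws if unset
        .build();
    byte[] wire = filter.toByteArray();   // serialize to the wire format
    return PageFilter.parseFrom(wire);    // delegates to PARSER.parseFrom(byte[])
  }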
9752 
9753   public interface PrefixFilterOrBuilder
9754       extends com.google.protobuf.MessageOrBuilder {
9755 
9756     // optional bytes prefix = 1;
9757     /**
9758      * <code>optional bytes prefix = 1;</code>
9759      */
9760     boolean hasPrefix();
9761     /**
9762      * <code>optional bytes prefix = 1;</code>
9763      */
9764     com.google.protobuf.ByteString getPrefix();
9765   }
9766   /**
9767    * Protobuf type {@code PrefixFilter}
9768    */
9769   public static final class PrefixFilter extends
9770       com.google.protobuf.GeneratedMessage
9771       implements PrefixFilterOrBuilder {
9772     // Use PrefixFilter.newBuilder() to construct.
9773     private PrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
9774       super(builder);
9775       this.unknownFields = builder.getUnknownFields();
9776     }
9777     private PrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9778 
9779     private static final PrefixFilter defaultInstance;
9780     public static PrefixFilter getDefaultInstance() {
9781       return defaultInstance;
9782     }
9783 
9784     public PrefixFilter getDefaultInstanceForType() {
9785       return defaultInstance;
9786     }
9787 
9788     private final com.google.protobuf.UnknownFieldSet unknownFields;
9789     @java.lang.Override
9790     public final com.google.protobuf.UnknownFieldSet
9791         getUnknownFields() {
9792       return this.unknownFields;
9793     }
9794     private PrefixFilter(
9795         com.google.protobuf.CodedInputStream input,
9796         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9797         throws com.google.protobuf.InvalidProtocolBufferException {
9798       initFields();
9799       int mutable_bitField0_ = 0;
9800       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
9801           com.google.protobuf.UnknownFieldSet.newBuilder();
9802       try {
9803         boolean done = false;
9804         while (!done) {
9805           int tag = input.readTag();
9806           switch (tag) {
9807             case 0:
9808               done = true;
9809               break;
9810             default: {
9811               if (!parseUnknownField(input, unknownFields,
9812                                      extensionRegistry, tag)) {
9813                 done = true;
9814               }
9815               break;
9816             }
9817             case 10: {
9818               bitField0_ |= 0x00000001;
9819               prefix_ = input.readBytes();
9820               break;
9821             }
9822           }
9823         }
9824       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9825         throw e.setUnfinishedMessage(this);
9826       } catch (java.io.IOException e) {
9827         throw new com.google.protobuf.InvalidProtocolBufferException(
9828             e.getMessage()).setUnfinishedMessage(this);
9829       } finally {
9830         this.unknownFields = unknownFields.build();
9831         makeExtensionsImmutable();
9832       }
9833     }
9834     public static final com.google.protobuf.Descriptors.Descriptor
9835         getDescriptor() {
9836       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor;
9837     }
9838 
9839     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9840         internalGetFieldAccessorTable() {
9841       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable
9842           .ensureFieldAccessorsInitialized(
9843               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class);
9844     }
9845 
9846     public static com.google.protobuf.Parser<PrefixFilter> PARSER =
9847         new com.google.protobuf.AbstractParser<PrefixFilter>() {
9848       public PrefixFilter parsePartialFrom(
9849           com.google.protobuf.CodedInputStream input,
9850           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9851           throws com.google.protobuf.InvalidProtocolBufferException {
9852         return new PrefixFilter(input, extensionRegistry);
9853       }
9854     };
9855 
9856     @java.lang.Override
9857     public com.google.protobuf.Parser<PrefixFilter> getParserForType() {
9858       return PARSER;
9859     }
9860 
9861     private int bitField0_;
9862     // optional bytes prefix = 1;
9863     public static final int PREFIX_FIELD_NUMBER = 1;
9864     private com.google.protobuf.ByteString prefix_;
9865     /**
9866      * <code>optional bytes prefix = 1;</code>
9867      */
9868     public boolean hasPrefix() {
9869       return ((bitField0_ & 0x00000001) == 0x00000001);
9870     }
9871     /**
9872      * <code>optional bytes prefix = 1;</code>
9873      */
9874     public com.google.protobuf.ByteString getPrefix() {
9875       return prefix_;
9876     }
9877 
9878     private void initFields() {
9879       prefix_ = com.google.protobuf.ByteString.EMPTY;
9880     }
9881     private byte memoizedIsInitialized = -1;
9882     public final boolean isInitialized() {
9883       byte isInitialized = memoizedIsInitialized;
9884       if (isInitialized != -1) return isInitialized == 1;
9885 
9886       memoizedIsInitialized = 1;
9887       return true;
9888     }
9889 
9890     public void writeTo(com.google.protobuf.CodedOutputStream output)
9891                         throws java.io.IOException {
9892       getSerializedSize();
9893       if (((bitField0_ & 0x00000001) == 0x00000001)) {
9894         output.writeBytes(1, prefix_);
9895       }
9896       getUnknownFields().writeTo(output);
9897     }
9898 
9899     private int memoizedSerializedSize = -1;
9900     public int getSerializedSize() {
9901       int size = memoizedSerializedSize;
9902       if (size != -1) return size;
9903 
9904       size = 0;
9905       if (((bitField0_ & 0x00000001) == 0x00000001)) {
9906         size += com.google.protobuf.CodedOutputStream
9907           .computeBytesSize(1, prefix_);
9908       }
9909       size += getUnknownFields().getSerializedSize();
9910       memoizedSerializedSize = size;
9911       return size;
9912     }
9913 
9914     private static final long serialVersionUID = 0L;
9915     @java.lang.Override
9916     protected java.lang.Object writeReplace()
9917         throws java.io.ObjectStreamException {
9918       return super.writeReplace();
9919     }
9920 
9921     @java.lang.Override
9922     public boolean equals(final java.lang.Object obj) {
9923       if (obj == this) {
9924        return true;
9925       }
9926       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)) {
9927         return super.equals(obj);
9928       }
9929       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) obj;
9930 
9931       boolean result = true;
9932       result = result && (hasPrefix() == other.hasPrefix());
9933       if (hasPrefix()) {
9934         result = result && getPrefix()
9935             .equals(other.getPrefix());
9936       }
9937       result = result &&
9938           getUnknownFields().equals(other.getUnknownFields());
9939       return result;
9940     }
9941 
9942     private int memoizedHashCode = 0;
9943     @java.lang.Override
9944     public int hashCode() {
9945       if (memoizedHashCode != 0) {
9946         return memoizedHashCode;
9947       }
9948       int hash = 41;
9949       hash = (19 * hash) + getDescriptorForType().hashCode();
9950       if (hasPrefix()) {
9951         hash = (37 * hash) + PREFIX_FIELD_NUMBER;
9952         hash = (53 * hash) + getPrefix().hashCode();
9953       }
9954       hash = (29 * hash) + getUnknownFields().hashCode();
9955       memoizedHashCode = hash;
9956       return hash;
9957     }
9958 
9959     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
9960         com.google.protobuf.ByteString data)
9961         throws com.google.protobuf.InvalidProtocolBufferException {
9962       return PARSER.parseFrom(data);
9963     }
9964     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
9965         com.google.protobuf.ByteString data,
9966         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9967         throws com.google.protobuf.InvalidProtocolBufferException {
9968       return PARSER.parseFrom(data, extensionRegistry);
9969     }
9970     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(byte[] data)
9971         throws com.google.protobuf.InvalidProtocolBufferException {
9972       return PARSER.parseFrom(data);
9973     }
9974     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
9975         byte[] data,
9976         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9977         throws com.google.protobuf.InvalidProtocolBufferException {
9978       return PARSER.parseFrom(data, extensionRegistry);
9979     }
9980     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(java.io.InputStream input)
9981         throws java.io.IOException {
9982       return PARSER.parseFrom(input);
9983     }
9984     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
9985         java.io.InputStream input,
9986         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9987         throws java.io.IOException {
9988       return PARSER.parseFrom(input, extensionRegistry);
9989     }
9990     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(java.io.InputStream input)
9991         throws java.io.IOException {
9992       return PARSER.parseDelimitedFrom(input);
9993     }
9994     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseDelimitedFrom(
9995         java.io.InputStream input,
9996         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9997         throws java.io.IOException {
9998       return PARSER.parseDelimitedFrom(input, extensionRegistry);
9999     }
10000     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
10001         com.google.protobuf.CodedInputStream input)
10002         throws java.io.IOException {
10003       return PARSER.parseFrom(input);
10004     }
10005     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parseFrom(
10006         com.google.protobuf.CodedInputStream input,
10007         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10008         throws java.io.IOException {
10009       return PARSER.parseFrom(input, extensionRegistry);
10010     }
10011 
10012     public static Builder newBuilder() { return Builder.create(); }
10013     public Builder newBuilderForType() { return newBuilder(); }
10014     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter prototype) {
10015       return newBuilder().mergeFrom(prototype);
10016     }
10017     public Builder toBuilder() { return newBuilder(this); }
10018 
10019     @java.lang.Override
10020     protected Builder newBuilderForType(
10021         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10022       Builder builder = new Builder(parent);
10023       return builder;
10024     }
10025     /**
10026      * Protobuf type {@code PrefixFilter}
10027      */
10028     public static final class Builder extends
10029         com.google.protobuf.GeneratedMessage.Builder<Builder>
10030        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilterOrBuilder {
10031       public static final com.google.protobuf.Descriptors.Descriptor
10032           getDescriptor() {
10033         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor;
10034       }
10035 
10036       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10037           internalGetFieldAccessorTable() {
10038         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_fieldAccessorTable
10039             .ensureFieldAccessorsInitialized(
10040                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.Builder.class);
10041       }
10042 
10043       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.newBuilder()
10044       private Builder() {
10045         maybeForceBuilderInitialization();
10046       }
10047 
10048       private Builder(
10049           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10050         super(parent);
10051         maybeForceBuilderInitialization();
10052       }
10053       private void maybeForceBuilderInitialization() {
10054         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10055         }
10056       }
10057       private static Builder create() {
10058         return new Builder();
10059       }
10060 
10061       public Builder clear() {
10062         super.clear();
10063         prefix_ = com.google.protobuf.ByteString.EMPTY;
10064         bitField0_ = (bitField0_ & ~0x00000001);
10065         return this;
10066       }
10067 
10068       public Builder clone() {
10069         return create().mergeFrom(buildPartial());
10070       }
10071 
10072       public com.google.protobuf.Descriptors.Descriptor
10073           getDescriptorForType() {
10074         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_PrefixFilter_descriptor;
10075       }
10076 
10077       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter getDefaultInstanceForType() {
10078         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance();
10079       }
10080 
10081       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter build() {
10082         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = buildPartial();
10083         if (!result.isInitialized()) {
10084           throw newUninitializedMessageException(result);
10085         }
10086         return result;
10087       }
10088 
10089       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter buildPartial() {
10090         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter(this);
10091         int from_bitField0_ = bitField0_;
10092         int to_bitField0_ = 0;
10093         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10094           to_bitField0_ |= 0x00000001;
10095         }
10096         result.prefix_ = prefix_;
10097         result.bitField0_ = to_bitField0_;
10098         onBuilt();
10099         return result;
10100       }
10101 
10102       public Builder mergeFrom(com.google.protobuf.Message other) {
10103         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) {
10104           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter)other);
10105         } else {
10106           super.mergeFrom(other);
10107           return this;
10108         }
10109       }
10110 
10111       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter other) {
10112         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter.getDefaultInstance()) return this;
10113         if (other.hasPrefix()) {
10114           setPrefix(other.getPrefix());
10115         }
10116         this.mergeUnknownFields(other.getUnknownFields());
10117         return this;
10118       }
10119 
10120       public final boolean isInitialized() {
10121         return true;
10122       }
10123 
10124       public Builder mergeFrom(
10125           com.google.protobuf.CodedInputStream input,
10126           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10127           throws java.io.IOException {
10128         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter parsedMessage = null;
10129         try {
10130           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10131         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10132           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.PrefixFilter) e.getUnfinishedMessage();
10133           throw e;
10134         } finally {
10135           if (parsedMessage != null) {
10136             mergeFrom(parsedMessage);
10137           }
10138         }
10139         return this;
10140       }
10141       private int bitField0_;
10142 
10143       // optional bytes prefix = 1;
10144       private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
10145       /**
10146        * <code>optional bytes prefix = 1;</code>
10147        */
10148       public boolean hasPrefix() {
10149         return ((bitField0_ & 0x00000001) == 0x00000001);
10150       }
10151       /**
10152        * <code>optional bytes prefix = 1;</code>
10153        */
10154       public com.google.protobuf.ByteString getPrefix() {
10155         return prefix_;
10156       }
10157       /**
10158        * <code>optional bytes prefix = 1;</code>
10159        */
10160       public Builder setPrefix(com.google.protobuf.ByteString value) {
10161         if (value == null) {
10162           throw new NullPointerException();
10163         }
10164         bitField0_ |= 0x00000001;
10165         prefix_ = value;
10166         onChanged();
10167         return this;
10168       }
10169       /**
10170        * <code>optional bytes prefix = 1;</code>
10171        */
10172       public Builder clearPrefix() {
10173         bitField0_ = (bitField0_ & ~0x00000001);
10174         prefix_ = getDefaultInstance().getPrefix();
10175         onChanged();
10176         return this;
10177       }
10178 
10179       // @@protoc_insertion_point(builder_scope:PrefixFilter)
10180     }
10181 
10182     static {
10183       defaultInstance = new PrefixFilter(true);
10184       defaultInstance.initFields();
10185     }
10186 
10187     // @@protoc_insertion_point(class_scope:PrefixFilter)
10188   }
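  // Illustrative sketch, not part of the generated file: the optional bytes prefix field of
  // PrefixFilter is carried as a com.google.protobuf.ByteString, and hasPrefix() reports
  // whether it was explicitly set. The helper name prefixFilterSketch is hypothetical.
  static boolean prefixFilterSketch() {
    PrefixFilter filter = PrefixFilter.newBuilder()
        .setPrefix(com.google.protobuf.ByteString.copyFromUtf8("row-"))  // optional bytes prefix = 1
        .build();                        // no required fields, so build() cannot throw here
    return filter.hasPrefix();           // true once the field has been set
  }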
10189 
10190   public interface QualifierFilterOrBuilder
10191       extends com.google.protobuf.MessageOrBuilder {
10192 
10193     // required .CompareFilter compare_filter = 1;
10194     /**
10195      * <code>required .CompareFilter compare_filter = 1;</code>
10196      */
10197     boolean hasCompareFilter();
10198     /**
10199      * <code>required .CompareFilter compare_filter = 1;</code>
10200      */
10201     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
10202     /**
10203      * <code>required .CompareFilter compare_filter = 1;</code>
10204      */
10205     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
10206   }
10207   /**
10208    * Protobuf type {@code QualifierFilter}
10209    */
10210   public static final class QualifierFilter extends
10211       com.google.protobuf.GeneratedMessage
10212       implements QualifierFilterOrBuilder {
10213     // Use QualifierFilter.newBuilder() to construct.
10214     private QualifierFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
10215       super(builder);
10216       this.unknownFields = builder.getUnknownFields();
10217     }
10218     private QualifierFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
10219 
10220     private static final QualifierFilter defaultInstance;
10221     public static QualifierFilter getDefaultInstance() {
10222       return defaultInstance;
10223     }
10224 
10225     public QualifierFilter getDefaultInstanceForType() {
10226       return defaultInstance;
10227     }
10228 
10229     private final com.google.protobuf.UnknownFieldSet unknownFields;
10230     @java.lang.Override
10231     public final com.google.protobuf.UnknownFieldSet
10232         getUnknownFields() {
10233       return this.unknownFields;
10234     }
10235     private QualifierFilter(
10236         com.google.protobuf.CodedInputStream input,
10237         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10238         throws com.google.protobuf.InvalidProtocolBufferException {
10239       initFields();
10240       int mutable_bitField0_ = 0;
10241       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
10242           com.google.protobuf.UnknownFieldSet.newBuilder();
10243       try {
10244         boolean done = false;
10245         while (!done) {
10246           int tag = input.readTag();
10247           switch (tag) {
10248             case 0:
10249               done = true;
10250               break;
10251             default: {
10252               if (!parseUnknownField(input, unknownFields,
10253                                      extensionRegistry, tag)) {
10254                 done = true;
10255               }
10256               break;
10257             }
10258             case 10: {
10259               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
10260               if (((bitField0_ & 0x00000001) == 0x00000001)) {
10261                 subBuilder = compareFilter_.toBuilder();
10262               }
10263               compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
10264               if (subBuilder != null) {
10265                 subBuilder.mergeFrom(compareFilter_);
10266                 compareFilter_ = subBuilder.buildPartial();
10267               }
10268               bitField0_ |= 0x00000001;
10269               break;
10270             }
10271           }
10272         }
10273       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10274         throw e.setUnfinishedMessage(this);
10275       } catch (java.io.IOException e) {
10276         throw new com.google.protobuf.InvalidProtocolBufferException(
10277             e.getMessage()).setUnfinishedMessage(this);
10278       } finally {
10279         this.unknownFields = unknownFields.build();
10280         makeExtensionsImmutable();
10281       }
10282     }
10283     public static final com.google.protobuf.Descriptors.Descriptor
10284         getDescriptor() {
10285       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor;
10286     }
10287 
10288     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10289         internalGetFieldAccessorTable() {
10290       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable
10291           .ensureFieldAccessorsInitialized(
10292               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class);
10293     }
10294 
10295     public static com.google.protobuf.Parser<QualifierFilter> PARSER =
10296         new com.google.protobuf.AbstractParser<QualifierFilter>() {
10297       public QualifierFilter parsePartialFrom(
10298           com.google.protobuf.CodedInputStream input,
10299           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10300           throws com.google.protobuf.InvalidProtocolBufferException {
10301         return new QualifierFilter(input, extensionRegistry);
10302       }
10303     };
10304 
10305     @java.lang.Override
10306     public com.google.protobuf.Parser<QualifierFilter> getParserForType() {
10307       return PARSER;
10308     }
10309 
10310     private int bitField0_;
10311     // required .CompareFilter compare_filter = 1;
10312     public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
10313     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
10314     /**
10315      * <code>required .CompareFilter compare_filter = 1;</code>
10316      */
10317     public boolean hasCompareFilter() {
10318       return ((bitField0_ & 0x00000001) == 0x00000001);
10319     }
10320     /**
10321      * <code>required .CompareFilter compare_filter = 1;</code>
10322      */
10323     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
10324       return compareFilter_;
10325     }
10326     /**
10327      * <code>required .CompareFilter compare_filter = 1;</code>
10328      */
10329     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
10330       return compareFilter_;
10331     }
10332 
10333     private void initFields() {
10334       compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
10335     }
10336     private byte memoizedIsInitialized = -1;
10337     public final boolean isInitialized() {
10338       byte isInitialized = memoizedIsInitialized;
10339       if (isInitialized != -1) return isInitialized == 1;
10340 
10341       if (!hasCompareFilter()) {
10342         memoizedIsInitialized = 0;
10343         return false;
10344       }
10345       if (!getCompareFilter().isInitialized()) {
10346         memoizedIsInitialized = 0;
10347         return false;
10348       }
10349       memoizedIsInitialized = 1;
10350       return true;
10351     }
10352 
10353     public void writeTo(com.google.protobuf.CodedOutputStream output)
10354                         throws java.io.IOException {
10355       getSerializedSize();
10356       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10357         output.writeMessage(1, compareFilter_);
10358       }
10359       getUnknownFields().writeTo(output);
10360     }
10361 
10362     private int memoizedSerializedSize = -1;
10363     public int getSerializedSize() {
10364       int size = memoizedSerializedSize;
10365       if (size != -1) return size;
10366 
10367       size = 0;
10368       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10369         size += com.google.protobuf.CodedOutputStream
10370           .computeMessageSize(1, compareFilter_);
10371       }
10372       size += getUnknownFields().getSerializedSize();
10373       memoizedSerializedSize = size;
10374       return size;
10375     }
10376 
10377     private static final long serialVersionUID = 0L;
10378     @java.lang.Override
10379     protected java.lang.Object writeReplace()
10380         throws java.io.ObjectStreamException {
10381       return super.writeReplace();
10382     }
10383 
10384     @java.lang.Override
10385     public boolean equals(final java.lang.Object obj) {
10386       if (obj == this) {
10387        return true;
10388       }
10389       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)) {
10390         return super.equals(obj);
10391       }
10392       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) obj;
10393 
10394       boolean result = true;
10395       result = result && (hasCompareFilter() == other.hasCompareFilter());
10396       if (hasCompareFilter()) {
10397         result = result && getCompareFilter()
10398             .equals(other.getCompareFilter());
10399       }
10400       result = result &&
10401           getUnknownFields().equals(other.getUnknownFields());
10402       return result;
10403     }
10404 
10405     private int memoizedHashCode = 0;
10406     @java.lang.Override
10407     public int hashCode() {
10408       if (memoizedHashCode != 0) {
10409         return memoizedHashCode;
10410       }
10411       int hash = 41;
10412       hash = (19 * hash) + getDescriptorForType().hashCode();
10413       if (hasCompareFilter()) {
10414         hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
10415         hash = (53 * hash) + getCompareFilter().hashCode();
10416       }
10417       hash = (29 * hash) + getUnknownFields().hashCode();
10418       memoizedHashCode = hash;
10419       return hash;
10420     }
10421 
10422     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
10423         com.google.protobuf.ByteString data)
10424         throws com.google.protobuf.InvalidProtocolBufferException {
10425       return PARSER.parseFrom(data);
10426     }
10427     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
10428         com.google.protobuf.ByteString data,
10429         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10430         throws com.google.protobuf.InvalidProtocolBufferException {
10431       return PARSER.parseFrom(data, extensionRegistry);
10432     }
10433     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(byte[] data)
10434         throws com.google.protobuf.InvalidProtocolBufferException {
10435       return PARSER.parseFrom(data);
10436     }
10437     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
10438         byte[] data,
10439         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10440         throws com.google.protobuf.InvalidProtocolBufferException {
10441       return PARSER.parseFrom(data, extensionRegistry);
10442     }
10443     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(java.io.InputStream input)
10444         throws java.io.IOException {
10445       return PARSER.parseFrom(input);
10446     }
10447     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
10448         java.io.InputStream input,
10449         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10450         throws java.io.IOException {
10451       return PARSER.parseFrom(input, extensionRegistry);
10452     }
10453     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(java.io.InputStream input)
10454         throws java.io.IOException {
10455       return PARSER.parseDelimitedFrom(input);
10456     }
10457     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseDelimitedFrom(
10458         java.io.InputStream input,
10459         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10460         throws java.io.IOException {
10461       return PARSER.parseDelimitedFrom(input, extensionRegistry);
10462     }
10463     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
10464         com.google.protobuf.CodedInputStream input)
10465         throws java.io.IOException {
10466       return PARSER.parseFrom(input);
10467     }
10468     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parseFrom(
10469         com.google.protobuf.CodedInputStream input,
10470         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10471         throws java.io.IOException {
10472       return PARSER.parseFrom(input, extensionRegistry);
10473     }
10474 
10475     public static Builder newBuilder() { return Builder.create(); }
10476     public Builder newBuilderForType() { return newBuilder(); }
10477     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter prototype) {
10478       return newBuilder().mergeFrom(prototype);
10479     }
10480     public Builder toBuilder() { return newBuilder(this); }
10481 
10482     @java.lang.Override
10483     protected Builder newBuilderForType(
10484         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10485       Builder builder = new Builder(parent);
10486       return builder;
10487     }
10488     /**
10489      * Protobuf type {@code QualifierFilter}
10490      */
10491     public static final class Builder extends
10492         com.google.protobuf.GeneratedMessage.Builder<Builder>
10493        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilterOrBuilder {
10494       public static final com.google.protobuf.Descriptors.Descriptor
10495           getDescriptor() {
10496         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor;
10497       }
10498 
10499       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10500           internalGetFieldAccessorTable() {
10501         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_fieldAccessorTable
10502             .ensureFieldAccessorsInitialized(
10503                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.Builder.class);
10504       }
10505 
10506       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.newBuilder()
10507       private Builder() {
10508         maybeForceBuilderInitialization();
10509       }
10510 
10511       private Builder(
10512           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10513         super(parent);
10514         maybeForceBuilderInitialization();
10515       }
10516       private void maybeForceBuilderInitialization() {
10517         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10518           getCompareFilterFieldBuilder();
10519         }
10520       }
10521       private static Builder create() {
10522         return new Builder();
10523       }
10524 
10525       public Builder clear() {
10526         super.clear();
10527         if (compareFilterBuilder_ == null) {
10528           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
10529         } else {
10530           compareFilterBuilder_.clear();
10531         }
10532         bitField0_ = (bitField0_ & ~0x00000001);
10533         return this;
10534       }
10535 
10536       public Builder clone() {
10537         return create().mergeFrom(buildPartial());
10538       }
10539 
10540       public com.google.protobuf.Descriptors.Descriptor
10541           getDescriptorForType() {
10542         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_QualifierFilter_descriptor;
10543       }
10544 
10545       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter getDefaultInstanceForType() {
10546         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance();
10547       }
10548 
10549       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter build() {
10550         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = buildPartial();
10551         if (!result.isInitialized()) {
10552           throw newUninitializedMessageException(result);
10553         }
10554         return result;
10555       }
10556 
10557       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter buildPartial() {
10558         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter(this);
10559         int from_bitField0_ = bitField0_;
10560         int to_bitField0_ = 0;
10561         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10562           to_bitField0_ |= 0x00000001;
10563         }
10564         if (compareFilterBuilder_ == null) {
10565           result.compareFilter_ = compareFilter_;
10566         } else {
10567           result.compareFilter_ = compareFilterBuilder_.build();
10568         }
10569         result.bitField0_ = to_bitField0_;
10570         onBuilt();
10571         return result;
10572       }
10573 
10574       public Builder mergeFrom(com.google.protobuf.Message other) {
10575         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) {
10576           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter)other);
10577         } else {
10578           super.mergeFrom(other);
10579           return this;
10580         }
10581       }
10582 
10583       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter other) {
10584         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter.getDefaultInstance()) return this;
10585         if (other.hasCompareFilter()) {
10586           mergeCompareFilter(other.getCompareFilter());
10587         }
10588         this.mergeUnknownFields(other.getUnknownFields());
10589         return this;
10590       }
10591 
10592       public final boolean isInitialized() {
10593         if (!hasCompareFilter()) {
10594 
10595           return false;
10596         }
10597         if (!getCompareFilter().isInitialized()) {
10598 
10599           return false;
10600         }
10601         return true;
10602       }
10603 
10604       public Builder mergeFrom(
10605           com.google.protobuf.CodedInputStream input,
10606           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10607           throws java.io.IOException {
10608         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter parsedMessage = null;
10609         try {
10610           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10611         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10612           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.QualifierFilter) e.getUnfinishedMessage();
10613           throw e;
10614         } finally {
10615           if (parsedMessage != null) {
10616             mergeFrom(parsedMessage);
10617           }
10618         }
10619         return this;
10620       }
10621       private int bitField0_;
10622 
10623       // required .CompareFilter compare_filter = 1;
10624       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
10625       private com.google.protobuf.SingleFieldBuilder<
10626           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
10627       /**
10628        * <code>required .CompareFilter compare_filter = 1;</code>
10629        */
10630       public boolean hasCompareFilter() {
10631         return ((bitField0_ & 0x00000001) == 0x00000001);
10632       }
10633       /**
10634        * <code>required .CompareFilter compare_filter = 1;</code>
10635        */
10636       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
10637         if (compareFilterBuilder_ == null) {
10638           return compareFilter_;
10639         } else {
10640           return compareFilterBuilder_.getMessage();
10641         }
10642       }
10643       /**
10644        * <code>required .CompareFilter compare_filter = 1;</code>
10645        */
10646       public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
10647         if (compareFilterBuilder_ == null) {
10648           if (value == null) {
10649             throw new NullPointerException();
10650           }
10651           compareFilter_ = value;
10652           onChanged();
10653         } else {
10654           compareFilterBuilder_.setMessage(value);
10655         }
10656         bitField0_ |= 0x00000001;
10657         return this;
10658       }
10659       /**
10660        * <code>required .CompareFilter compare_filter = 1;</code>
10661        */
10662       public Builder setCompareFilter(
10663           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
10664         if (compareFilterBuilder_ == null) {
10665           compareFilter_ = builderForValue.build();
10666           onChanged();
10667         } else {
10668           compareFilterBuilder_.setMessage(builderForValue.build());
10669         }
10670         bitField0_ |= 0x00000001;
10671         return this;
10672       }
10673       /**
10674        * <code>required .CompareFilter compare_filter = 1;</code>
10675        */
10676       public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
10677         if (compareFilterBuilder_ == null) {
10678           if (((bitField0_ & 0x00000001) == 0x00000001) &&
10679               compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
10680             compareFilter_ =
10681               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
10682           } else {
10683             compareFilter_ = value;
10684           }
10685           onChanged();
10686         } else {
10687           compareFilterBuilder_.mergeFrom(value);
10688         }
10689         bitField0_ |= 0x00000001;
10690         return this;
10691       }
10692       /**
10693        * <code>required .CompareFilter compare_filter = 1;</code>
10694        */
10695       public Builder clearCompareFilter() {
10696         if (compareFilterBuilder_ == null) {
10697           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
10698           onChanged();
10699         } else {
10700           compareFilterBuilder_.clear();
10701         }
10702         bitField0_ = (bitField0_ & ~0x00000001);
10703         return this;
10704       }
10705       /**
10706        * <code>required .CompareFilter compare_filter = 1;</code>
10707        */
10708       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
10709         bitField0_ |= 0x00000001;
10710         onChanged();
10711         return getCompareFilterFieldBuilder().getBuilder();
10712       }
10713       /**
10714        * <code>required .CompareFilter compare_filter = 1;</code>
10715        */
10716       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
10717         if (compareFilterBuilder_ != null) {
10718           return compareFilterBuilder_.getMessageOrBuilder();
10719         } else {
10720           return compareFilter_;
10721         }
10722       }
10723       /**
10724        * <code>required .CompareFilter compare_filter = 1;</code>
10725        */
10726       private com.google.protobuf.SingleFieldBuilder<
10727           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
10728           getCompareFilterFieldBuilder() {
10729         if (compareFilterBuilder_ == null) {
10730           compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
10731               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
10732                   compareFilter_,
10733                   getParentForChildren(),
10734                   isClean());
10735           compareFilter_ = null;
10736         }
10737         return compareFilterBuilder_;
10738       }
10739 
10740       // @@protoc_insertion_point(builder_scope:QualifierFilter)
10741     }
10742 
10743     static {
10744       defaultInstance = new QualifierFilter(true);
10745       defaultInstance.initFields();
10746     }
10747 
10748     // @@protoc_insertion_point(class_scope:QualifierFilter)
10749   }
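  // Illustrative sketch, not part of the generated file: QualifierFilter wraps a required
  // nested CompareFilter message, set through the builder. buildPartial() is used because a
  // fully initialized CompareFilter needs its own required fields, which are defined outside
  // this excerpt. The helper name qualifierFilterSketch is hypothetical.
  static QualifierFilter qualifierFilterSketch(CompareFilter compareFilter) {
    return QualifierFilter.newBuilder()
        .setCompareFilter(compareFilter)  // required .CompareFilter compare_filter = 1
        .buildPartial();                  // skips the isInitialized() check that build() performs
  }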
10750 
10751   public interface RandomRowFilterOrBuilder
10752       extends com.google.protobuf.MessageOrBuilder {
10753 
10754     // required float chance = 1;
10755     /**
10756      * <code>required float chance = 1;</code>
10757      */
10758     boolean hasChance();
10759     /**
10760      * <code>required float chance = 1;</code>
10761      */
10762     float getChance();
10763   }
10764   /**
10765    * Protobuf type {@code RandomRowFilter}
10766    */
10767   public static final class RandomRowFilter extends
10768       com.google.protobuf.GeneratedMessage
10769       implements RandomRowFilterOrBuilder {
10770     // Use RandomRowFilter.newBuilder() to construct.
10771     private RandomRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
10772       super(builder);
10773       this.unknownFields = builder.getUnknownFields();
10774     }
10775     private RandomRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
10776 
10777     private static final RandomRowFilter defaultInstance;
10778     public static RandomRowFilter getDefaultInstance() {
10779       return defaultInstance;
10780     }
10781 
10782     public RandomRowFilter getDefaultInstanceForType() {
10783       return defaultInstance;
10784     }
10785 
10786     private final com.google.protobuf.UnknownFieldSet unknownFields;
10787     @java.lang.Override
10788     public final com.google.protobuf.UnknownFieldSet
10789         getUnknownFields() {
10790       return this.unknownFields;
10791     }
10792     private RandomRowFilter(
10793         com.google.protobuf.CodedInputStream input,
10794         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10795         throws com.google.protobuf.InvalidProtocolBufferException {
10796       initFields();
10797       int mutable_bitField0_ = 0;
10798       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
10799           com.google.protobuf.UnknownFieldSet.newBuilder();
10800       try {
10801         boolean done = false;
10802         while (!done) {
10803           int tag = input.readTag();
10804           switch (tag) {
10805             case 0:
10806               done = true;
10807               break;
10808             default: {
10809               if (!parseUnknownField(input, unknownFields,
10810                                      extensionRegistry, tag)) {
10811                 done = true;
10812               }
10813               break;
10814             }
10815             case 13: {
10816               bitField0_ |= 0x00000001;
10817               chance_ = input.readFloat();
10818               break;
10819             }
10820           }
10821         }
10822       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10823         throw e.setUnfinishedMessage(this);
10824       } catch (java.io.IOException e) {
10825         throw new com.google.protobuf.InvalidProtocolBufferException(
10826             e.getMessage()).setUnfinishedMessage(this);
10827       } finally {
10828         this.unknownFields = unknownFields.build();
10829         makeExtensionsImmutable();
10830       }
10831     }
10832     public static final com.google.protobuf.Descriptors.Descriptor
10833         getDescriptor() {
10834       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor;
10835     }
10836 
10837     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10838         internalGetFieldAccessorTable() {
10839       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable
10840           .ensureFieldAccessorsInitialized(
10841               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
10842     }
10843 
10844     public static com.google.protobuf.Parser<RandomRowFilter> PARSER =
10845         new com.google.protobuf.AbstractParser<RandomRowFilter>() {
10846       public RandomRowFilter parsePartialFrom(
10847           com.google.protobuf.CodedInputStream input,
10848           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10849           throws com.google.protobuf.InvalidProtocolBufferException {
10850         return new RandomRowFilter(input, extensionRegistry);
10851       }
10852     };
10853 
10854     @java.lang.Override
10855     public com.google.protobuf.Parser<RandomRowFilter> getParserForType() {
10856       return PARSER;
10857     }
10858 
10859     private int bitField0_;
10860     // required float chance = 1;
10861     public static final int CHANCE_FIELD_NUMBER = 1;
10862     private float chance_;
10863     /**
10864      * <code>required float chance = 1;</code>
10865      */
10866     public boolean hasChance() {
10867       return ((bitField0_ & 0x00000001) == 0x00000001);
10868     }
10869     /**
10870      * <code>required float chance = 1;</code>
10871      */
10872     public float getChance() {
10873       return chance_;
10874     }
10875 
10876     private void initFields() {
10877       chance_ = 0F;
10878     }
10879     private byte memoizedIsInitialized = -1;
10880     public final boolean isInitialized() {
10881       byte isInitialized = memoizedIsInitialized;
10882       if (isInitialized != -1) return isInitialized == 1;
10883 
10884       if (!hasChance()) {
10885         memoizedIsInitialized = 0;
10886         return false;
10887       }
10888       memoizedIsInitialized = 1;
10889       return true;
10890     }
10891 
10892     public void writeTo(com.google.protobuf.CodedOutputStream output)
10893                         throws java.io.IOException {
10894       getSerializedSize();
10895       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10896         output.writeFloat(1, chance_);
10897       }
10898       getUnknownFields().writeTo(output);
10899     }
10900 
10901     private int memoizedSerializedSize = -1;
10902     public int getSerializedSize() {
10903       int size = memoizedSerializedSize;
10904       if (size != -1) return size;
10905 
10906       size = 0;
10907       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10908         size += com.google.protobuf.CodedOutputStream
10909           .computeFloatSize(1, chance_);
10910       }
10911       size += getUnknownFields().getSerializedSize();
10912       memoizedSerializedSize = size;
10913       return size;
10914     }
10915 
10916     private static final long serialVersionUID = 0L;
10917     @java.lang.Override
10918     protected java.lang.Object writeReplace()
10919         throws java.io.ObjectStreamException {
10920       return super.writeReplace();
10921     }
10922 
10923     @java.lang.Override
10924     public boolean equals(final java.lang.Object obj) {
10925       if (obj == this) {
10926        return true;
10927       }
10928       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)) {
10929         return super.equals(obj);
10930       }
10931       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) obj;
10932 
10933       boolean result = true;
10934       result = result && (hasChance() == other.hasChance());
10935       if (hasChance()) {
10936         result = result && (Float.floatToIntBits(getChance())    == Float.floatToIntBits(other.getChance()));
10937       }
10938       result = result &&
10939           getUnknownFields().equals(other.getUnknownFields());
10940       return result;
10941     }
10942 
10943     private int memoizedHashCode = 0;
10944     @java.lang.Override
10945     public int hashCode() {
10946       if (memoizedHashCode != 0) {
10947         return memoizedHashCode;
10948       }
10949       int hash = 41;
10950       hash = (19 * hash) + getDescriptorForType().hashCode();
10951       if (hasChance()) {
10952         hash = (37 * hash) + CHANCE_FIELD_NUMBER;
10953         hash = (53 * hash) + Float.floatToIntBits(
10954             getChance());
10955       }
10956       hash = (29 * hash) + getUnknownFields().hashCode();
10957       memoizedHashCode = hash;
10958       return hash;
10959     }
10960 
10961     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
10962         com.google.protobuf.ByteString data)
10963         throws com.google.protobuf.InvalidProtocolBufferException {
10964       return PARSER.parseFrom(data);
10965     }
10966     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
10967         com.google.protobuf.ByteString data,
10968         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10969         throws com.google.protobuf.InvalidProtocolBufferException {
10970       return PARSER.parseFrom(data, extensionRegistry);
10971     }
10972     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(byte[] data)
10973         throws com.google.protobuf.InvalidProtocolBufferException {
10974       return PARSER.parseFrom(data);
10975     }
10976     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
10977         byte[] data,
10978         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10979         throws com.google.protobuf.InvalidProtocolBufferException {
10980       return PARSER.parseFrom(data, extensionRegistry);
10981     }
10982     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(java.io.InputStream input)
10983         throws java.io.IOException {
10984       return PARSER.parseFrom(input);
10985     }
10986     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
10987         java.io.InputStream input,
10988         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10989         throws java.io.IOException {
10990       return PARSER.parseFrom(input, extensionRegistry);
10991     }
10992     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(java.io.InputStream input)
10993         throws java.io.IOException {
10994       return PARSER.parseDelimitedFrom(input);
10995     }
10996     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseDelimitedFrom(
10997         java.io.InputStream input,
10998         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10999         throws java.io.IOException {
11000       return PARSER.parseDelimitedFrom(input, extensionRegistry);
11001     }
11002     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
11003         com.google.protobuf.CodedInputStream input)
11004         throws java.io.IOException {
11005       return PARSER.parseFrom(input);
11006     }
11007     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parseFrom(
11008         com.google.protobuf.CodedInputStream input,
11009         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11010         throws java.io.IOException {
11011       return PARSER.parseFrom(input, extensionRegistry);
11012     }
11013 
11014     public static Builder newBuilder() { return Builder.create(); }
11015     public Builder newBuilderForType() { return newBuilder(); }
11016     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter prototype) {
11017       return newBuilder().mergeFrom(prototype);
11018     }
11019     public Builder toBuilder() { return newBuilder(this); }
11020 
11021     @java.lang.Override
11022     protected Builder newBuilderForType(
11023         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11024       Builder builder = new Builder(parent);
11025       return builder;
11026     }
11027     /**
11028      * Protobuf type {@code RandomRowFilter}
11029      */
11030     public static final class Builder extends
11031         com.google.protobuf.GeneratedMessage.Builder<Builder>
11032        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilterOrBuilder {
11033       public static final com.google.protobuf.Descriptors.Descriptor
11034           getDescriptor() {
11035         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor;
11036       }
11037 
11038       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11039           internalGetFieldAccessorTable() {
11040         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_fieldAccessorTable
11041             .ensureFieldAccessorsInitialized(
11042                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.Builder.class);
11043       }
11044 
11045       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.newBuilder()
11046       private Builder() {
11047         maybeForceBuilderInitialization();
11048       }
11049 
11050       private Builder(
11051           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11052         super(parent);
11053         maybeForceBuilderInitialization();
11054       }
11055       private void maybeForceBuilderInitialization() {
11056         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11057         }
11058       }
11059       private static Builder create() {
11060         return new Builder();
11061       }
11062 
11063       public Builder clear() {
11064         super.clear();
11065         chance_ = 0F;
11066         bitField0_ = (bitField0_ & ~0x00000001);
11067         return this;
11068       }
11069 
11070       public Builder clone() {
11071         return create().mergeFrom(buildPartial());
11072       }
11073 
11074       public com.google.protobuf.Descriptors.Descriptor
11075           getDescriptorForType() {
11076         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RandomRowFilter_descriptor;
11077       }
11078 
11079       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter getDefaultInstanceForType() {
11080         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance();
11081       }
11082 
11083       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter build() {
11084         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = buildPartial();
11085         if (!result.isInitialized()) {
11086           throw newUninitializedMessageException(result);
11087         }
11088         return result;
11089       }
11090 
11091       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter buildPartial() {
11092         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter(this);
11093         int from_bitField0_ = bitField0_;
11094         int to_bitField0_ = 0;
11095         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
11096           to_bitField0_ |= 0x00000001;
11097         }
11098         result.chance_ = chance_;
11099         result.bitField0_ = to_bitField0_;
11100         onBuilt();
11101         return result;
11102       }
11103 
11104       public Builder mergeFrom(com.google.protobuf.Message other) {
11105         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) {
11106           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter)other);
11107         } else {
11108           super.mergeFrom(other);
11109           return this;
11110         }
11111       }
11112 
11113       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter other) {
11114         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter.getDefaultInstance()) return this;
11115         if (other.hasChance()) {
11116           setChance(other.getChance());
11117         }
11118         this.mergeUnknownFields(other.getUnknownFields());
11119         return this;
11120       }
11121 
11122       public final boolean isInitialized() {
11123         if (!hasChance()) {
11124 
11125           return false;
11126         }
11127         return true;
11128       }
11129 
11130       public Builder mergeFrom(
11131           com.google.protobuf.CodedInputStream input,
11132           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11133           throws java.io.IOException {
11134         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter parsedMessage = null;
11135         try {
11136           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11137         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11138           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RandomRowFilter) e.getUnfinishedMessage();
11139           throw e;
11140         } finally {
11141           if (parsedMessage != null) {
11142             mergeFrom(parsedMessage);
11143           }
11144         }
11145         return this;
11146       }
11147       private int bitField0_;
11148 
11149       // required float chance = 1;
11150       private float chance_ ;
11151       /**
11152        * <code>required float chance = 1;</code>
11153        */
11154       public boolean hasChance() {
11155         return ((bitField0_ & 0x00000001) == 0x00000001);
11156       }
11157       /**
11158        * <code>required float chance = 1;</code>
11159        */
11160       public float getChance() {
11161         return chance_;
11162       }
11163       /**
11164        * <code>required float chance = 1;</code>
11165        */
11166       public Builder setChance(float value) {
11167         bitField0_ |= 0x00000001;
11168         chance_ = value;
11169         onChanged();
11170         return this;
11171       }
11172       /**
11173        * <code>required float chance = 1;</code>
11174        */
11175       public Builder clearChance() {
11176         bitField0_ = (bitField0_ & ~0x00000001);
11177         chance_ = 0F;
11178         onChanged();
11179         return this;
11180       }
11181 
11182       // @@protoc_insertion_point(builder_scope:RandomRowFilter)
11183     }
11184 
11185     static {
11186       defaultInstance = new RandomRowFilter(true);
11187       defaultInstance.initFields();
11188     }
11189 
11190     // @@protoc_insertion_point(class_scope:RandomRowFilter)
11191   }
11192 
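  // A minimal round-trip sketch for RandomRowFilter, assuming the standard
  // protobuf serialization API (toByteArray() from MessageLite) in addition to
  // the generated methods above; the 0.5f chance value is illustrative only:
  //
  //   FilterProtos.RandomRowFilter filter =
  //       FilterProtos.RandomRowFilter.newBuilder()
  //           .setChance(0.5f)   // chance is a required field
  //           .build();          // throws if a required field is unset
  //   byte[] bytes = filter.toByteArray();
  //   FilterProtos.RandomRowFilter parsed =
  //       FilterProtos.RandomRowFilter.parseFrom(bytes);
  //   assert parsed.hasChance() && parsed.getChance() == 0.5f;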
11193   public interface RowFilterOrBuilder
11194       extends com.google.protobuf.MessageOrBuilder {
11195 
11196     // required .CompareFilter compare_filter = 1;
11197     /**
11198      * <code>required .CompareFilter compare_filter = 1;</code>
11199      */
11200     boolean hasCompareFilter();
11201     /**
11202      * <code>required .CompareFilter compare_filter = 1;</code>
11203      */
11204     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
11205     /**
11206      * <code>required .CompareFilter compare_filter = 1;</code>
11207      */
11208     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
11209   }
11210   /**
11211    * Protobuf type {@code RowFilter}
11212    */
11213   public static final class RowFilter extends
11214       com.google.protobuf.GeneratedMessage
11215       implements RowFilterOrBuilder {
11216     // Use RowFilter.newBuilder() to construct.
11217     private RowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
11218       super(builder);
11219       this.unknownFields = builder.getUnknownFields();
11220     }
11221     private RowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11222 
11223     private static final RowFilter defaultInstance;
11224     public static RowFilter getDefaultInstance() {
11225       return defaultInstance;
11226     }
11227 
11228     public RowFilter getDefaultInstanceForType() {
11229       return defaultInstance;
11230     }
11231 
11232     private final com.google.protobuf.UnknownFieldSet unknownFields;
11233     @java.lang.Override
11234     public final com.google.protobuf.UnknownFieldSet
11235         getUnknownFields() {
11236       return this.unknownFields;
11237     }
11238     private RowFilter(
11239         com.google.protobuf.CodedInputStream input,
11240         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11241         throws com.google.protobuf.InvalidProtocolBufferException {
11242       initFields();
11243       int mutable_bitField0_ = 0;
11244       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
11245           com.google.protobuf.UnknownFieldSet.newBuilder();
11246       try {
11247         boolean done = false;
11248         while (!done) {
11249           int tag = input.readTag();
11250           switch (tag) {
11251             case 0:
11252               done = true;
11253               break;
11254             default: {
11255               if (!parseUnknownField(input, unknownFields,
11256                                      extensionRegistry, tag)) {
11257                 done = true;
11258               }
11259               break;
11260             }
11261             case 10: {
11262               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
11263               if (((bitField0_ & 0x00000001) == 0x00000001)) {
11264                 subBuilder = compareFilter_.toBuilder();
11265               }
11266               compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
11267               if (subBuilder != null) {
11268                 subBuilder.mergeFrom(compareFilter_);
11269                 compareFilter_ = subBuilder.buildPartial();
11270               }
11271               bitField0_ |= 0x00000001;
11272               break;
11273             }
11274           }
11275         }
11276       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11277         throw e.setUnfinishedMessage(this);
11278       } catch (java.io.IOException e) {
11279         throw new com.google.protobuf.InvalidProtocolBufferException(
11280             e.getMessage()).setUnfinishedMessage(this);
11281       } finally {
11282         this.unknownFields = unknownFields.build();
11283         makeExtensionsImmutable();
11284       }
11285     }
11286     public static final com.google.protobuf.Descriptors.Descriptor
11287         getDescriptor() {
11288       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor;
11289     }
11290 
11291     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11292         internalGetFieldAccessorTable() {
11293       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable
11294           .ensureFieldAccessorsInitialized(
11295               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
11296     }
11297 
11298     public static com.google.protobuf.Parser<RowFilter> PARSER =
11299         new com.google.protobuf.AbstractParser<RowFilter>() {
11300       public RowFilter parsePartialFrom(
11301           com.google.protobuf.CodedInputStream input,
11302           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11303           throws com.google.protobuf.InvalidProtocolBufferException {
11304         return new RowFilter(input, extensionRegistry);
11305       }
11306     };
11307 
11308     @java.lang.Override
11309     public com.google.protobuf.Parser<RowFilter> getParserForType() {
11310       return PARSER;
11311     }
11312 
11313     private int bitField0_;
11314     // required .CompareFilter compare_filter = 1;
11315     public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
11316     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
11317     /**
11318      * <code>required .CompareFilter compare_filter = 1;</code>
11319      */
11320     public boolean hasCompareFilter() {
11321       return ((bitField0_ & 0x00000001) == 0x00000001);
11322     }
11323     /**
11324      * <code>required .CompareFilter compare_filter = 1;</code>
11325      */
11326     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
11327       return compareFilter_;
11328     }
11329     /**
11330      * <code>required .CompareFilter compare_filter = 1;</code>
11331      */
11332     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
11333       return compareFilter_;
11334     }
11335 
11336     private void initFields() {
11337       compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
11338     }
11339     private byte memoizedIsInitialized = -1;
11340     public final boolean isInitialized() {
11341       byte isInitialized = memoizedIsInitialized;
11342       if (isInitialized != -1) return isInitialized == 1;
11343 
11344       if (!hasCompareFilter()) {
11345         memoizedIsInitialized = 0;
11346         return false;
11347       }
11348       if (!getCompareFilter().isInitialized()) {
11349         memoizedIsInitialized = 0;
11350         return false;
11351       }
11352       memoizedIsInitialized = 1;
11353       return true;
11354     }
11355 
11356     public void writeTo(com.google.protobuf.CodedOutputStream output)
11357                         throws java.io.IOException {
11358       getSerializedSize();
11359       if (((bitField0_ & 0x00000001) == 0x00000001)) {
11360         output.writeMessage(1, compareFilter_);
11361       }
11362       getUnknownFields().writeTo(output);
11363     }
11364 
11365     private int memoizedSerializedSize = -1;
11366     public int getSerializedSize() {
11367       int size = memoizedSerializedSize;
11368       if (size != -1) return size;
11369 
11370       size = 0;
11371       if (((bitField0_ & 0x00000001) == 0x00000001)) {
11372         size += com.google.protobuf.CodedOutputStream
11373           .computeMessageSize(1, compareFilter_);
11374       }
11375       size += getUnknownFields().getSerializedSize();
11376       memoizedSerializedSize = size;
11377       return size;
11378     }
11379 
11380     private static final long serialVersionUID = 0L;
11381     @java.lang.Override
11382     protected java.lang.Object writeReplace()
11383         throws java.io.ObjectStreamException {
11384       return super.writeReplace();
11385     }
11386 
11387     @java.lang.Override
11388     public boolean equals(final java.lang.Object obj) {
11389       if (obj == this) {
11390        return true;
11391       }
11392       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)) {
11393         return super.equals(obj);
11394       }
11395       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) obj;
11396 
11397       boolean result = true;
11398       result = result && (hasCompareFilter() == other.hasCompareFilter());
11399       if (hasCompareFilter()) {
11400         result = result && getCompareFilter()
11401             .equals(other.getCompareFilter());
11402       }
11403       result = result &&
11404           getUnknownFields().equals(other.getUnknownFields());
11405       return result;
11406     }
11407 
11408     private int memoizedHashCode = 0;
11409     @java.lang.Override
11410     public int hashCode() {
11411       if (memoizedHashCode != 0) {
11412         return memoizedHashCode;
11413       }
11414       int hash = 41;
11415       hash = (19 * hash) + getDescriptorForType().hashCode();
11416       if (hasCompareFilter()) {
11417         hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
11418         hash = (53 * hash) + getCompareFilter().hashCode();
11419       }
11420       hash = (29 * hash) + getUnknownFields().hashCode();
11421       memoizedHashCode = hash;
11422       return hash;
11423     }
11424 
11425     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
11426         com.google.protobuf.ByteString data)
11427         throws com.google.protobuf.InvalidProtocolBufferException {
11428       return PARSER.parseFrom(data);
11429     }
11430     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
11431         com.google.protobuf.ByteString data,
11432         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11433         throws com.google.protobuf.InvalidProtocolBufferException {
11434       return PARSER.parseFrom(data, extensionRegistry);
11435     }
11436     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(byte[] data)
11437         throws com.google.protobuf.InvalidProtocolBufferException {
11438       return PARSER.parseFrom(data);
11439     }
11440     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
11441         byte[] data,
11442         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11443         throws com.google.protobuf.InvalidProtocolBufferException {
11444       return PARSER.parseFrom(data, extensionRegistry);
11445     }
11446     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(java.io.InputStream input)
11447         throws java.io.IOException {
11448       return PARSER.parseFrom(input);
11449     }
11450     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
11451         java.io.InputStream input,
11452         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11453         throws java.io.IOException {
11454       return PARSER.parseFrom(input, extensionRegistry);
11455     }
11456     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(java.io.InputStream input)
11457         throws java.io.IOException {
11458       return PARSER.parseDelimitedFrom(input);
11459     }
11460     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseDelimitedFrom(
11461         java.io.InputStream input,
11462         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11463         throws java.io.IOException {
11464       return PARSER.parseDelimitedFrom(input, extensionRegistry);
11465     }
11466     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
11467         com.google.protobuf.CodedInputStream input)
11468         throws java.io.IOException {
11469       return PARSER.parseFrom(input);
11470     }
11471     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parseFrom(
11472         com.google.protobuf.CodedInputStream input,
11473         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11474         throws java.io.IOException {
11475       return PARSER.parseFrom(input, extensionRegistry);
11476     }
11477 
11478     public static Builder newBuilder() { return Builder.create(); }
11479     public Builder newBuilderForType() { return newBuilder(); }
11480     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter prototype) {
11481       return newBuilder().mergeFrom(prototype);
11482     }
11483     public Builder toBuilder() { return newBuilder(this); }
11484 
11485     @java.lang.Override
11486     protected Builder newBuilderForType(
11487         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11488       Builder builder = new Builder(parent);
11489       return builder;
11490     }
11491     /**
11492      * Protobuf type {@code RowFilter}
11493      */
11494     public static final class Builder extends
11495         com.google.protobuf.GeneratedMessage.Builder<Builder>
11496        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilterOrBuilder {
11497       public static final com.google.protobuf.Descriptors.Descriptor
11498           getDescriptor() {
11499         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor;
11500       }
11501 
11502       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11503           internalGetFieldAccessorTable() {
11504         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_fieldAccessorTable
11505             .ensureFieldAccessorsInitialized(
11506                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.Builder.class);
11507       }
11508 
11509       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.newBuilder()
11510       private Builder() {
11511         maybeForceBuilderInitialization();
11512       }
11513 
11514       private Builder(
11515           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11516         super(parent);
11517         maybeForceBuilderInitialization();
11518       }
11519       private void maybeForceBuilderInitialization() {
11520         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11521           getCompareFilterFieldBuilder();
11522         }
11523       }
11524       private static Builder create() {
11525         return new Builder();
11526       }
11527 
11528       public Builder clear() {
11529         super.clear();
11530         if (compareFilterBuilder_ == null) {
11531           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
11532         } else {
11533           compareFilterBuilder_.clear();
11534         }
11535         bitField0_ = (bitField0_ & ~0x00000001);
11536         return this;
11537       }
11538 
11539       public Builder clone() {
11540         return create().mergeFrom(buildPartial());
11541       }
11542 
11543       public com.google.protobuf.Descriptors.Descriptor
11544           getDescriptorForType() {
11545         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowFilter_descriptor;
11546       }
11547 
11548       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter getDefaultInstanceForType() {
11549         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance();
11550       }
11551 
11552       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter build() {
11553         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = buildPartial();
11554         if (!result.isInitialized()) {
11555           throw newUninitializedMessageException(result);
11556         }
11557         return result;
11558       }
11559 
11560       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter buildPartial() {
11561         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter(this);
11562         int from_bitField0_ = bitField0_;
11563         int to_bitField0_ = 0;
11564         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
11565           to_bitField0_ |= 0x00000001;
11566         }
11567         if (compareFilterBuilder_ == null) {
11568           result.compareFilter_ = compareFilter_;
11569         } else {
11570           result.compareFilter_ = compareFilterBuilder_.build();
11571         }
11572         result.bitField0_ = to_bitField0_;
11573         onBuilt();
11574         return result;
11575       }
11576 
11577       public Builder mergeFrom(com.google.protobuf.Message other) {
11578         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) {
11579           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter)other);
11580         } else {
11581           super.mergeFrom(other);
11582           return this;
11583         }
11584       }
11585 
11586       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter other) {
11587         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter.getDefaultInstance()) return this;
11588         if (other.hasCompareFilter()) {
11589           mergeCompareFilter(other.getCompareFilter());
11590         }
11591         this.mergeUnknownFields(other.getUnknownFields());
11592         return this;
11593       }
11594 
11595       public final boolean isInitialized() {
11596         if (!hasCompareFilter()) {
11597 
11598           return false;
11599         }
11600         if (!getCompareFilter().isInitialized()) {
11601 
11602           return false;
11603         }
11604         return true;
11605       }
11606 
11607       public Builder mergeFrom(
11608           com.google.protobuf.CodedInputStream input,
11609           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11610           throws java.io.IOException {
11611         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter parsedMessage = null;
11612         try {
11613           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11614         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11615           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowFilter) e.getUnfinishedMessage();
11616           throw e;
11617         } finally {
11618           if (parsedMessage != null) {
11619             mergeFrom(parsedMessage);
11620           }
11621         }
11622         return this;
11623       }
11624       private int bitField0_;
11625 
11626       // required .CompareFilter compare_filter = 1;
11627       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
11628       private com.google.protobuf.SingleFieldBuilder<
11629           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
11630       /**
11631        * <code>required .CompareFilter compare_filter = 1;</code>
11632        */
11633       public boolean hasCompareFilter() {
11634         return ((bitField0_ & 0x00000001) == 0x00000001);
11635       }
11636       /**
11637        * <code>required .CompareFilter compare_filter = 1;</code>
11638        */
11639       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
11640         if (compareFilterBuilder_ == null) {
11641           return compareFilter_;
11642         } else {
11643           return compareFilterBuilder_.getMessage();
11644         }
11645       }
11646       /**
11647        * <code>required .CompareFilter compare_filter = 1;</code>
11648        */
11649       public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
11650         if (compareFilterBuilder_ == null) {
11651           if (value == null) {
11652             throw new NullPointerException();
11653           }
11654           compareFilter_ = value;
11655           onChanged();
11656         } else {
11657           compareFilterBuilder_.setMessage(value);
11658         }
11659         bitField0_ |= 0x00000001;
11660         return this;
11661       }
11662       /**
11663        * <code>required .CompareFilter compare_filter = 1;</code>
11664        */
11665       public Builder setCompareFilter(
11666           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
11667         if (compareFilterBuilder_ == null) {
11668           compareFilter_ = builderForValue.build();
11669           onChanged();
11670         } else {
11671           compareFilterBuilder_.setMessage(builderForValue.build());
11672         }
11673         bitField0_ |= 0x00000001;
11674         return this;
11675       }
11676       /**
11677        * <code>required .CompareFilter compare_filter = 1;</code>
11678        */
11679       public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
11680         if (compareFilterBuilder_ == null) {
11681           if (((bitField0_ & 0x00000001) == 0x00000001) &&
11682               compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
11683             compareFilter_ =
11684               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
11685           } else {
11686             compareFilter_ = value;
11687           }
11688           onChanged();
11689         } else {
11690           compareFilterBuilder_.mergeFrom(value);
11691         }
11692         bitField0_ |= 0x00000001;
11693         return this;
11694       }
11695       /**
11696        * <code>required .CompareFilter compare_filter = 1;</code>
11697        */
11698       public Builder clearCompareFilter() {
11699         if (compareFilterBuilder_ == null) {
11700           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
11701           onChanged();
11702         } else {
11703           compareFilterBuilder_.clear();
11704         }
11705         bitField0_ = (bitField0_ & ~0x00000001);
11706         return this;
11707       }
11708       /**
11709        * <code>required .CompareFilter compare_filter = 1;</code>
11710        */
11711       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
11712         bitField0_ |= 0x00000001;
11713         onChanged();
11714         return getCompareFilterFieldBuilder().getBuilder();
11715       }
11716       /**
11717        * <code>required .CompareFilter compare_filter = 1;</code>
11718        */
11719       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
11720         if (compareFilterBuilder_ != null) {
11721           return compareFilterBuilder_.getMessageOrBuilder();
11722         } else {
11723           return compareFilter_;
11724         }
11725       }
11726       /**
11727        * <code>required .CompareFilter compare_filter = 1;</code>
11728        */
11729       private com.google.protobuf.SingleFieldBuilder<
11730           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
11731           getCompareFilterFieldBuilder() {
11732         if (compareFilterBuilder_ == null) {
11733           compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
11734               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
11735                   compareFilter_,
11736                   getParentForChildren(),
11737                   isClean());
11738           compareFilter_ = null;
11739         }
11740         return compareFilterBuilder_;
11741       }
11742 
11743       // @@protoc_insertion_point(builder_scope:RowFilter)
11744     }
11745 
11746     static {
11747       defaultInstance = new RowFilter(true);
11748       defaultInstance.initFields();
11749     }
11750 
11751     // @@protoc_insertion_point(class_scope:RowFilter)
11752   }
11753 
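  // A minimal sketch of the required-field check on RowFilter, assuming
  // compareFilter is an already-initialized FilterProtos.CompareFilter
  // instance obtained elsewhere:
  //
  //   FilterProtos.RowFilter.Builder builder = FilterProtos.RowFilter.newBuilder();
  //   assert !builder.isInitialized();   // compare_filter is required and still unset
  //   FilterProtos.RowFilter rowFilter = builder
  //       .setCompareFilter(compareFilter)
  //       .build();   // build() throws an UninitializedMessageException if required fields are missing
  //   assert rowFilter.hasCompareFilter();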
11754   public interface SingleColumnValueExcludeFilterOrBuilder
11755       extends com.google.protobuf.MessageOrBuilder {
11756 
11757     // required .SingleColumnValueFilter single_column_value_filter = 1;
11758     /**
11759      * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
11760      */
11761     boolean hasSingleColumnValueFilter();
11762     /**
11763      * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
11764      */
11765     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter();
11766     /**
11767      * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
11768      */
11769     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder();
11770   }
11771   /**
11772    * Protobuf type {@code SingleColumnValueExcludeFilter}
11773    */
11774   public static final class SingleColumnValueExcludeFilter extends
11775       com.google.protobuf.GeneratedMessage
11776       implements SingleColumnValueExcludeFilterOrBuilder {
11777     // Use SingleColumnValueExcludeFilter.newBuilder() to construct.
11778     private SingleColumnValueExcludeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
11779       super(builder);
11780       this.unknownFields = builder.getUnknownFields();
11781     }
11782     private SingleColumnValueExcludeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
11783 
11784     private static final SingleColumnValueExcludeFilter defaultInstance;
11785     public static SingleColumnValueExcludeFilter getDefaultInstance() {
11786       return defaultInstance;
11787     }
11788 
11789     public SingleColumnValueExcludeFilter getDefaultInstanceForType() {
11790       return defaultInstance;
11791     }
11792 
11793     private final com.google.protobuf.UnknownFieldSet unknownFields;
11794     @java.lang.Override
11795     public final com.google.protobuf.UnknownFieldSet
11796         getUnknownFields() {
11797       return this.unknownFields;
11798     }
11799     private SingleColumnValueExcludeFilter(
11800         com.google.protobuf.CodedInputStream input,
11801         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11802         throws com.google.protobuf.InvalidProtocolBufferException {
11803       initFields();
11804       int mutable_bitField0_ = 0;
11805       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
11806           com.google.protobuf.UnknownFieldSet.newBuilder();
11807       try {
11808         boolean done = false;
11809         while (!done) {
11810           int tag = input.readTag();
11811           switch (tag) {
11812             case 0:
11813               done = true;
11814               break;
11815             default: {
11816               if (!parseUnknownField(input, unknownFields,
11817                                      extensionRegistry, tag)) {
11818                 done = true;
11819               }
11820               break;
11821             }
11822             case 10: {
11823               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder subBuilder = null;
11824               if (((bitField0_ & 0x00000001) == 0x00000001)) {
11825                 subBuilder = singleColumnValueFilter_.toBuilder();
11826               }
11827               singleColumnValueFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.PARSER, extensionRegistry);
11828               if (subBuilder != null) {
11829                 subBuilder.mergeFrom(singleColumnValueFilter_);
11830                 singleColumnValueFilter_ = subBuilder.buildPartial();
11831               }
11832               bitField0_ |= 0x00000001;
11833               break;
11834             }
11835           }
11836         }
11837       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11838         throw e.setUnfinishedMessage(this);
11839       } catch (java.io.IOException e) {
11840         throw new com.google.protobuf.InvalidProtocolBufferException(
11841             e.getMessage()).setUnfinishedMessage(this);
11842       } finally {
11843         this.unknownFields = unknownFields.build();
11844         makeExtensionsImmutable();
11845       }
11846     }
11847     public static final com.google.protobuf.Descriptors.Descriptor
11848         getDescriptor() {
11849       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor;
11850     }
11851 
11852     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11853         internalGetFieldAccessorTable() {
11854       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable
11855           .ensureFieldAccessorsInitialized(
11856               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class);
11857     }
11858 
11859     public static com.google.protobuf.Parser<SingleColumnValueExcludeFilter> PARSER =
11860         new com.google.protobuf.AbstractParser<SingleColumnValueExcludeFilter>() {
11861       public SingleColumnValueExcludeFilter parsePartialFrom(
11862           com.google.protobuf.CodedInputStream input,
11863           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11864           throws com.google.protobuf.InvalidProtocolBufferException {
11865         return new SingleColumnValueExcludeFilter(input, extensionRegistry);
11866       }
11867     };
11868 
11869     @java.lang.Override
11870     public com.google.protobuf.Parser<SingleColumnValueExcludeFilter> getParserForType() {
11871       return PARSER;
11872     }
11873 
11874     private int bitField0_;
11875     // required .SingleColumnValueFilter single_column_value_filter = 1;
11876     public static final int SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER = 1;
11877     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_;
11878     /**
11879      * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
11880      */
11881     public boolean hasSingleColumnValueFilter() {
11882       return ((bitField0_ & 0x00000001) == 0x00000001);
11883     }
11884     /**
11885      * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
11886      */
11887     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() {
11888       return singleColumnValueFilter_;
11889     }
11890     /**
11891      * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
11892      */
11893     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() {
11894       return singleColumnValueFilter_;
11895     }
11896 
11897     private void initFields() {
11898       singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
11899     }
11900     private byte memoizedIsInitialized = -1;
11901     public final boolean isInitialized() {
11902       byte isInitialized = memoizedIsInitialized;
11903       if (isInitialized != -1) return isInitialized == 1;
11904 
11905       if (!hasSingleColumnValueFilter()) {
11906         memoizedIsInitialized = 0;
11907         return false;
11908       }
11909       if (!getSingleColumnValueFilter().isInitialized()) {
11910         memoizedIsInitialized = 0;
11911         return false;
11912       }
11913       memoizedIsInitialized = 1;
11914       return true;
11915     }
11916 
11917     public void writeTo(com.google.protobuf.CodedOutputStream output)
11918                         throws java.io.IOException {
11919       getSerializedSize();
11920       if (((bitField0_ & 0x00000001) == 0x00000001)) {
11921         output.writeMessage(1, singleColumnValueFilter_);
11922       }
11923       getUnknownFields().writeTo(output);
11924     }
11925 
11926     private int memoizedSerializedSize = -1;
11927     public int getSerializedSize() {
11928       int size = memoizedSerializedSize;
11929       if (size != -1) return size;
11930 
11931       size = 0;
11932       if (((bitField0_ & 0x00000001) == 0x00000001)) {
11933         size += com.google.protobuf.CodedOutputStream
11934           .computeMessageSize(1, singleColumnValueFilter_);
11935       }
11936       size += getUnknownFields().getSerializedSize();
11937       memoizedSerializedSize = size;
11938       return size;
11939     }
11940 
11941     private static final long serialVersionUID = 0L;
11942     @java.lang.Override
11943     protected java.lang.Object writeReplace()
11944         throws java.io.ObjectStreamException {
11945       return super.writeReplace();
11946     }
11947 
11948     @java.lang.Override
11949     public boolean equals(final java.lang.Object obj) {
11950       if (obj == this) {
11951        return true;
11952       }
11953       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)) {
11954         return super.equals(obj);
11955       }
11956       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) obj;
11957 
11958       boolean result = true;
11959       result = result && (hasSingleColumnValueFilter() == other.hasSingleColumnValueFilter());
11960       if (hasSingleColumnValueFilter()) {
11961         result = result && getSingleColumnValueFilter()
11962             .equals(other.getSingleColumnValueFilter());
11963       }
11964       result = result &&
11965           getUnknownFields().equals(other.getUnknownFields());
11966       return result;
11967     }
11968 
11969     private int memoizedHashCode = 0;
11970     @java.lang.Override
11971     public int hashCode() {
11972       if (memoizedHashCode != 0) {
11973         return memoizedHashCode;
11974       }
11975       int hash = 41;
11976       hash = (19 * hash) + getDescriptorForType().hashCode();
11977       if (hasSingleColumnValueFilter()) {
11978         hash = (37 * hash) + SINGLE_COLUMN_VALUE_FILTER_FIELD_NUMBER;
11979         hash = (53 * hash) + getSingleColumnValueFilter().hashCode();
11980       }
11981       hash = (29 * hash) + getUnknownFields().hashCode();
11982       memoizedHashCode = hash;
11983       return hash;
11984     }
11985 
11986     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
11987         com.google.protobuf.ByteString data)
11988         throws com.google.protobuf.InvalidProtocolBufferException {
11989       return PARSER.parseFrom(data);
11990     }
11991     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
11992         com.google.protobuf.ByteString data,
11993         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11994         throws com.google.protobuf.InvalidProtocolBufferException {
11995       return PARSER.parseFrom(data, extensionRegistry);
11996     }
11997     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(byte[] data)
11998         throws com.google.protobuf.InvalidProtocolBufferException {
11999       return PARSER.parseFrom(data);
12000     }
12001     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
12002         byte[] data,
12003         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12004         throws com.google.protobuf.InvalidProtocolBufferException {
12005       return PARSER.parseFrom(data, extensionRegistry);
12006     }
12007     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(java.io.InputStream input)
12008         throws java.io.IOException {
12009       return PARSER.parseFrom(input);
12010     }
12011     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
12012         java.io.InputStream input,
12013         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12014         throws java.io.IOException {
12015       return PARSER.parseFrom(input, extensionRegistry);
12016     }
12017     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(java.io.InputStream input)
12018         throws java.io.IOException {
12019       return PARSER.parseDelimitedFrom(input);
12020     }
12021     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseDelimitedFrom(
12022         java.io.InputStream input,
12023         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12024         throws java.io.IOException {
12025       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12026     }
12027     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
12028         com.google.protobuf.CodedInputStream input)
12029         throws java.io.IOException {
12030       return PARSER.parseFrom(input);
12031     }
12032     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parseFrom(
12033         com.google.protobuf.CodedInputStream input,
12034         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12035         throws java.io.IOException {
12036       return PARSER.parseFrom(input, extensionRegistry);
12037     }
12038 
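    // The static parseFrom/parseDelimitedFrom overloads above all delegate to PARSER.
    // A minimal round-trip sketch, assuming an already-built message in a hypothetical
    // variable named "filter" (not part of the generated code):
    //
    //   com.google.protobuf.ByteString bytes = filter.toByteString();
    //   SingleColumnValueExcludeFilter copy = SingleColumnValueExcludeFilter.parseFrom(bytes);
    //
    // Parsing fails with InvalidProtocolBufferException when the required
    // single_column_value_filter field is missing from the serialized bytes.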
12039     public static Builder newBuilder() { return Builder.create(); }
12040     public Builder newBuilderForType() { return newBuilder(); }
12041     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter prototype) {
12042       return newBuilder().mergeFrom(prototype);
12043     }
12044     public Builder toBuilder() { return newBuilder(this); }
12045 
12046     @java.lang.Override
12047     protected Builder newBuilderForType(
12048         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12049       Builder builder = new Builder(parent);
12050       return builder;
12051     }
12052     /**
12053      * Protobuf type {@code SingleColumnValueExcludeFilter}
12054      */
12055     public static final class Builder extends
12056         com.google.protobuf.GeneratedMessage.Builder<Builder>
12057        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilterOrBuilder {
12058       public static final com.google.protobuf.Descriptors.Descriptor
12059           getDescriptor() {
12060         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor;
12061       }
12062 
12063       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12064           internalGetFieldAccessorTable() {
12065         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable
12066             .ensureFieldAccessorsInitialized(
12067                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.Builder.class);
12068       }
12069 
12070       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.newBuilder()
12071       private Builder() {
12072         maybeForceBuilderInitialization();
12073       }
12074 
12075       private Builder(
12076           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12077         super(parent);
12078         maybeForceBuilderInitialization();
12079       }
12080       private void maybeForceBuilderInitialization() {
12081         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12082           getSingleColumnValueFilterFieldBuilder();
12083         }
12084       }
12085       private static Builder create() {
12086         return new Builder();
12087       }
12088 
12089       public Builder clear() {
12090         super.clear();
12091         if (singleColumnValueFilterBuilder_ == null) {
12092           singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
12093         } else {
12094           singleColumnValueFilterBuilder_.clear();
12095         }
12096         bitField0_ = (bitField0_ & ~0x00000001);
12097         return this;
12098       }
12099 
12100       public Builder clone() {
12101         return create().mergeFrom(buildPartial());
12102       }
12103 
12104       public com.google.protobuf.Descriptors.Descriptor
12105           getDescriptorForType() {
12106         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueExcludeFilter_descriptor;
12107       }
12108 
12109       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter getDefaultInstanceForType() {
12110         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance();
12111       }
12112 
12113       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter build() {
12114         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = buildPartial();
12115         if (!result.isInitialized()) {
12116           throw newUninitializedMessageException(result);
12117         }
12118         return result;
12119       }
12120 
12121       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter buildPartial() {
12122         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter(this);
12123         int from_bitField0_ = bitField0_;
12124         int to_bitField0_ = 0;
12125         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
12126           to_bitField0_ |= 0x00000001;
12127         }
12128         if (singleColumnValueFilterBuilder_ == null) {
12129           result.singleColumnValueFilter_ = singleColumnValueFilter_;
12130         } else {
12131           result.singleColumnValueFilter_ = singleColumnValueFilterBuilder_.build();
12132         }
12133         result.bitField0_ = to_bitField0_;
12134         onBuilt();
12135         return result;
12136       }
12137 
12138       public Builder mergeFrom(com.google.protobuf.Message other) {
12139         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) {
12140           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter)other);
12141         } else {
12142           super.mergeFrom(other);
12143           return this;
12144         }
12145       }
12146 
12147       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter other) {
12148         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter.getDefaultInstance()) return this;
12149         if (other.hasSingleColumnValueFilter()) {
12150           mergeSingleColumnValueFilter(other.getSingleColumnValueFilter());
12151         }
12152         this.mergeUnknownFields(other.getUnknownFields());
12153         return this;
12154       }
12155 
12156       public final boolean isInitialized() {
12157         if (!hasSingleColumnValueFilter()) {
12158 
12159           return false;
12160         }
12161         if (!getSingleColumnValueFilter().isInitialized()) {
12162 
12163           return false;
12164         }
12165         return true;
12166       }
12167 
12168       public Builder mergeFrom(
12169           com.google.protobuf.CodedInputStream input,
12170           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12171           throws java.io.IOException {
12172         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter parsedMessage = null;
12173         try {
12174           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12175         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12176           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueExcludeFilter) e.getUnfinishedMessage();
12177           throw e;
12178         } finally {
12179           if (parsedMessage != null) {
12180             mergeFrom(parsedMessage);
12181           }
12182         }
12183         return this;
12184       }
12185       private int bitField0_;
12186 
12187       // required .SingleColumnValueFilter single_column_value_filter = 1;
12188       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
12189       private com.google.protobuf.SingleFieldBuilder<
12190           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder> singleColumnValueFilterBuilder_;
12191       /**
12192        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12193        */
12194       public boolean hasSingleColumnValueFilter() {
12195         return ((bitField0_ & 0x00000001) == 0x00000001);
12196       }
12197       /**
12198        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12199        */
12200       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getSingleColumnValueFilter() {
12201         if (singleColumnValueFilterBuilder_ == null) {
12202           return singleColumnValueFilter_;
12203         } else {
12204           return singleColumnValueFilterBuilder_.getMessage();
12205         }
12206       }
12207       /**
12208        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12209        */
12210       public Builder setSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) {
12211         if (singleColumnValueFilterBuilder_ == null) {
12212           if (value == null) {
12213             throw new NullPointerException();
12214           }
12215           singleColumnValueFilter_ = value;
12216           onChanged();
12217         } else {
12218           singleColumnValueFilterBuilder_.setMessage(value);
12219         }
12220         bitField0_ |= 0x00000001;
12221         return this;
12222       }
12223       /**
12224        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12225        */
12226       public Builder setSingleColumnValueFilter(
12227           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder builderForValue) {
12228         if (singleColumnValueFilterBuilder_ == null) {
12229           singleColumnValueFilter_ = builderForValue.build();
12230           onChanged();
12231         } else {
12232           singleColumnValueFilterBuilder_.setMessage(builderForValue.build());
12233         }
12234         bitField0_ |= 0x00000001;
12235         return this;
12236       }
12237       /**
12238        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12239        */
12240       public Builder mergeSingleColumnValueFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter value) {
12241         if (singleColumnValueFilterBuilder_ == null) {
12242           if (((bitField0_ & 0x00000001) == 0x00000001) &&
12243               singleColumnValueFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) {
12244             singleColumnValueFilter_ =
12245               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder(singleColumnValueFilter_).mergeFrom(value).buildPartial();
12246           } else {
12247             singleColumnValueFilter_ = value;
12248           }
12249           onChanged();
12250         } else {
12251           singleColumnValueFilterBuilder_.mergeFrom(value);
12252         }
12253         bitField0_ |= 0x00000001;
12254         return this;
12255       }
12256       /**
12257        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12258        */
12259       public Builder clearSingleColumnValueFilter() {
12260         if (singleColumnValueFilterBuilder_ == null) {
12261           singleColumnValueFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
12262           onChanged();
12263         } else {
12264           singleColumnValueFilterBuilder_.clear();
12265         }
12266         bitField0_ = (bitField0_ & ~0x00000001);
12267         return this;
12268       }
12269       /**
12270        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12271        */
12272       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder getSingleColumnValueFilterBuilder() {
12273         bitField0_ |= 0x00000001;
12274         onChanged();
12275         return getSingleColumnValueFilterFieldBuilder().getBuilder();
12276       }
12277       /**
12278        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12279        */
12280       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder getSingleColumnValueFilterOrBuilder() {
12281         if (singleColumnValueFilterBuilder_ != null) {
12282           return singleColumnValueFilterBuilder_.getMessageOrBuilder();
12283         } else {
12284           return singleColumnValueFilter_;
12285         }
12286       }
12287       /**
12288        * <code>required .SingleColumnValueFilter single_column_value_filter = 1;</code>
12289        */
12290       private com.google.protobuf.SingleFieldBuilder<
12291           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>
12292           getSingleColumnValueFilterFieldBuilder() {
12293         if (singleColumnValueFilterBuilder_ == null) {
12294           singleColumnValueFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12295               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder>(
12296                   singleColumnValueFilter_,
12297                   getParentForChildren(),
12298                   isClean());
12299           singleColumnValueFilter_ = null;
12300         }
12301         return singleColumnValueFilterBuilder_;
12302       }
12303 
12304       // @@protoc_insertion_point(builder_scope:SingleColumnValueExcludeFilter)
12305     }
12306 
12307     static {
12308       defaultInstance = new SingleColumnValueExcludeFilter(true);
12309       defaultInstance.initFields();
12310     }
12311 
12312     // @@protoc_insertion_point(class_scope:SingleColumnValueExcludeFilter)
12313   }
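  // A minimal construction sketch for the message above, assuming the SingleColumnValueFilter
  // builder API defined later in this file. The family, qualifier, and comparator name are
  // hypothetical placeholders, and CompareType.EQUAL plus Comparator.setName are assumed from
  // HBaseProtos and ComparatorProtos rather than shown in this file:
  //
  //   SingleColumnValueFilter scvf = SingleColumnValueFilter.newBuilder()
  //       .setColumnFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
  //       .setColumnQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
  //       .setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.EQUAL)
  //       .setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator
  //           .newBuilder().setName("org.apache.hadoop.hbase.filter.BinaryComparator").build())
  //       .build();
  //   SingleColumnValueExcludeFilter excludeFilter = SingleColumnValueExcludeFilter.newBuilder()
  //       .setSingleColumnValueFilter(scvf)
  //       .build();
  //
  // build() throws if the required compare_op, comparator, or single_column_value_filter
  // fields were left unset.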
12314 
12315   public interface SingleColumnValueFilterOrBuilder
12316       extends com.google.protobuf.MessageOrBuilder {
12317 
12318     // optional bytes column_family = 1;
12319     /**
12320      * <code>optional bytes column_family = 1;</code>
12321      */
12322     boolean hasColumnFamily();
12323     /**
12324      * <code>optional bytes column_family = 1;</code>
12325      */
12326     com.google.protobuf.ByteString getColumnFamily();
12327 
12328     // optional bytes column_qualifier = 2;
12329     /**
12330      * <code>optional bytes column_qualifier = 2;</code>
12331      */
12332     boolean hasColumnQualifier();
12333     /**
12334      * <code>optional bytes column_qualifier = 2;</code>
12335      */
12336     com.google.protobuf.ByteString getColumnQualifier();
12337 
12338     // required .CompareType compare_op = 3;
12339     /**
12340      * <code>required .CompareType compare_op = 3;</code>
12341      */
12342     boolean hasCompareOp();
12343     /**
12344      * <code>required .CompareType compare_op = 3;</code>
12345      */
12346     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp();
12347 
12348     // required .Comparator comparator = 4;
12349     /**
12350      * <code>required .Comparator comparator = 4;</code>
12351      */
12352     boolean hasComparator();
12353     /**
12354      * <code>required .Comparator comparator = 4;</code>
12355      */
12356     org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
12357     /**
12358      * <code>required .Comparator comparator = 4;</code>
12359      */
12360     org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
12361 
12362     // optional bool filter_if_missing = 5;
12363     /**
12364      * <code>optional bool filter_if_missing = 5;</code>
12365      */
12366     boolean hasFilterIfMissing();
12367     /**
12368      * <code>optional bool filter_if_missing = 5;</code>
12369      */
12370     boolean getFilterIfMissing();
12371 
12372     // optional bool latest_version_only = 6;
12373     /**
12374      * <code>optional bool latest_version_only = 6;</code>
12375      */
12376     boolean hasLatestVersionOnly();
12377     /**
12378      * <code>optional bool latest_version_only = 6;</code>
12379      */
12380     boolean getLatestVersionOnly();
12381   }
12382   /**
12383    * Protobuf type {@code SingleColumnValueFilter}
12384    */
12385   public static final class SingleColumnValueFilter extends
12386       com.google.protobuf.GeneratedMessage
12387       implements SingleColumnValueFilterOrBuilder {
12388     // Use SingleColumnValueFilter.newBuilder() to construct.
12389     private SingleColumnValueFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
12390       super(builder);
12391       this.unknownFields = builder.getUnknownFields();
12392     }
12393     private SingleColumnValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12394 
12395     private static final SingleColumnValueFilter defaultInstance;
12396     public static SingleColumnValueFilter getDefaultInstance() {
12397       return defaultInstance;
12398     }
12399 
12400     public SingleColumnValueFilter getDefaultInstanceForType() {
12401       return defaultInstance;
12402     }
12403 
12404     private final com.google.protobuf.UnknownFieldSet unknownFields;
12405     @java.lang.Override
12406     public final com.google.protobuf.UnknownFieldSet
12407         getUnknownFields() {
12408       return this.unknownFields;
12409     }
12410     private SingleColumnValueFilter(
12411         com.google.protobuf.CodedInputStream input,
12412         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12413         throws com.google.protobuf.InvalidProtocolBufferException {
12414       initFields();
12415       int mutable_bitField0_ = 0;
12416       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12417           com.google.protobuf.UnknownFieldSet.newBuilder();
12418       try {
12419         boolean done = false;
12420         while (!done) {
12421           int tag = input.readTag();
12422           switch (tag) {
12423             case 0:
12424               done = true;
12425               break;
12426             default: {
12427               if (!parseUnknownField(input, unknownFields,
12428                                      extensionRegistry, tag)) {
12429                 done = true;
12430               }
12431               break;
12432             }
12433             case 10: {
12434               bitField0_ |= 0x00000001;
12435               columnFamily_ = input.readBytes();
12436               break;
12437             }
12438             case 18: {
12439               bitField0_ |= 0x00000002;
12440               columnQualifier_ = input.readBytes();
12441               break;
12442             }
12443             case 24: {
12444               int rawValue = input.readEnum();
12445               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
12446               if (value == null) {
12447                 unknownFields.mergeVarintField(3, rawValue);
12448               } else {
12449                 bitField0_ |= 0x00000004;
12450                 compareOp_ = value;
12451               }
12452               break;
12453             }
12454             case 34: {
12455               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
12456               if (((bitField0_ & 0x00000008) == 0x00000008)) {
12457                 subBuilder = comparator_.toBuilder();
12458               }
12459               comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
12460               if (subBuilder != null) {
12461                 subBuilder.mergeFrom(comparator_);
12462                 comparator_ = subBuilder.buildPartial();
12463               }
12464               bitField0_ |= 0x00000008;
12465               break;
12466             }
12467             case 40: {
12468               bitField0_ |= 0x00000010;
12469               filterIfMissing_ = input.readBool();
12470               break;
12471             }
12472             case 48: {
12473               bitField0_ |= 0x00000020;
12474               latestVersionOnly_ = input.readBool();
12475               break;
12476             }
12477           }
12478         }
12479       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12480         throw e.setUnfinishedMessage(this);
12481       } catch (java.io.IOException e) {
12482         throw new com.google.protobuf.InvalidProtocolBufferException(
12483             e.getMessage()).setUnfinishedMessage(this);
12484       } finally {
12485         this.unknownFields = unknownFields.build();
12486         makeExtensionsImmutable();
12487       }
12488     }
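    // The switch above dispatches on the wire tag, which packs the field number and wire
    // type as (field_number << 3) | wire_type: tags 10 and 18 are the length-delimited
    // bytes fields 1 and 2, 24 is the varint enum field 3, 34 is the embedded Comparator
    // message field 4, and 40/48 are the varint bool fields 5 and 6. Unrecognized tags are
    // preserved in unknownFields rather than rejected.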
12489     public static final com.google.protobuf.Descriptors.Descriptor
12490         getDescriptor() {
12491       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor;
12492     }
12493 
12494     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12495         internalGetFieldAccessorTable() {
12496       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable
12497           .ensureFieldAccessorsInitialized(
12498               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class);
12499     }
12500 
12501     public static com.google.protobuf.Parser<SingleColumnValueFilter> PARSER =
12502         new com.google.protobuf.AbstractParser<SingleColumnValueFilter>() {
12503       public SingleColumnValueFilter parsePartialFrom(
12504           com.google.protobuf.CodedInputStream input,
12505           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12506           throws com.google.protobuf.InvalidProtocolBufferException {
12507         return new SingleColumnValueFilter(input, extensionRegistry);
12508       }
12509     };
12510 
12511     @java.lang.Override
12512     public com.google.protobuf.Parser<SingleColumnValueFilter> getParserForType() {
12513       return PARSER;
12514     }
12515 
12516     private int bitField0_;
12517     // optional bytes column_family = 1;
12518     public static final int COLUMN_FAMILY_FIELD_NUMBER = 1;
12519     private com.google.protobuf.ByteString columnFamily_;
12520     /**
12521      * <code>optional bytes column_family = 1;</code>
12522      */
12523     public boolean hasColumnFamily() {
12524       return ((bitField0_ & 0x00000001) == 0x00000001);
12525     }
12526     /**
12527      * <code>optional bytes column_family = 1;</code>
12528      */
12529     public com.google.protobuf.ByteString getColumnFamily() {
12530       return columnFamily_;
12531     }
12532 
12533     // optional bytes column_qualifier = 2;
12534     public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 2;
12535     private com.google.protobuf.ByteString columnQualifier_;
12536     /**
12537      * <code>optional bytes column_qualifier = 2;</code>
12538      */
12539     public boolean hasColumnQualifier() {
12540       return ((bitField0_ & 0x00000002) == 0x00000002);
12541     }
12542     /**
12543      * <code>optional bytes column_qualifier = 2;</code>
12544      */
12545     public com.google.protobuf.ByteString getColumnQualifier() {
12546       return columnQualifier_;
12547     }
12548 
12549     // required .CompareType compare_op = 3;
12550     public static final int COMPARE_OP_FIELD_NUMBER = 3;
12551     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_;
12552     /**
12553      * <code>required .CompareType compare_op = 3;</code>
12554      */
12555     public boolean hasCompareOp() {
12556       return ((bitField0_ & 0x00000004) == 0x00000004);
12557     }
12558     /**
12559      * <code>required .CompareType compare_op = 3;</code>
12560      */
12561     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
12562       return compareOp_;
12563     }
12564 
12565     // required .Comparator comparator = 4;
12566     public static final int COMPARATOR_FIELD_NUMBER = 4;
12567     private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
12568     /**
12569      * <code>required .Comparator comparator = 4;</code>
12570      */
12571     public boolean hasComparator() {
12572       return ((bitField0_ & 0x00000008) == 0x00000008);
12573     }
12574     /**
12575      * <code>required .Comparator comparator = 4;</code>
12576      */
12577     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
12578       return comparator_;
12579     }
12580     /**
12581      * <code>required .Comparator comparator = 4;</code>
12582      */
12583     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
12584       return comparator_;
12585     }
12586 
12587     // optional bool filter_if_missing = 5;
12588     public static final int FILTER_IF_MISSING_FIELD_NUMBER = 5;
12589     private boolean filterIfMissing_;
12590     /**
12591      * <code>optional bool filter_if_missing = 5;</code>
12592      */
12593     public boolean hasFilterIfMissing() {
12594       return ((bitField0_ & 0x00000010) == 0x00000010);
12595     }
12596     /**
12597      * <code>optional bool filter_if_missing = 5;</code>
12598      */
12599     public boolean getFilterIfMissing() {
12600       return filterIfMissing_;
12601     }
12602 
12603     // optional bool latest_version_only = 6;
12604     public static final int LATEST_VERSION_ONLY_FIELD_NUMBER = 6;
12605     private boolean latestVersionOnly_;
12606     /**
12607      * <code>optional bool latest_version_only = 6;</code>
12608      */
12609     public boolean hasLatestVersionOnly() {
12610       return ((bitField0_ & 0x00000020) == 0x00000020);
12611     }
12612     /**
12613      * <code>optional bool latest_version_only = 6;</code>
12614      */
12615     public boolean getLatestVersionOnly() {
12616       return latestVersionOnly_;
12617     }
12618 
12619     private void initFields() {
12620       columnFamily_ = com.google.protobuf.ByteString.EMPTY;
12621       columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
12622       compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
12623       comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
12624       filterIfMissing_ = false;
12625       latestVersionOnly_ = false;
12626     }
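    // These defaults mirror the .proto declaration: unset bytes fields read back as
    // ByteString.EMPTY, compare_op as LESS, and the bool fields as false, while the
    // corresponding has*() accessors still report whether each field was explicitly set.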
12627     private byte memoizedIsInitialized = -1;
12628     public final boolean isInitialized() {
12629       byte isInitialized = memoizedIsInitialized;
12630       if (isInitialized != -1) return isInitialized == 1;
12631 
12632       if (!hasCompareOp()) {
12633         memoizedIsInitialized = 0;
12634         return false;
12635       }
12636       if (!hasComparator()) {
12637         memoizedIsInitialized = 0;
12638         return false;
12639       }
12640       if (!getComparator().isInitialized()) {
12641         memoizedIsInitialized = 0;
12642         return false;
12643       }
12644       memoizedIsInitialized = 1;
12645       return true;
12646     }
12647 
12648     public void writeTo(com.google.protobuf.CodedOutputStream output)
12649                         throws java.io.IOException {
12650       getSerializedSize();
12651       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12652         output.writeBytes(1, columnFamily_);
12653       }
12654       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12655         output.writeBytes(2, columnQualifier_);
12656       }
12657       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12658         output.writeEnum(3, compareOp_.getNumber());
12659       }
12660       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12661         output.writeMessage(4, comparator_);
12662       }
12663       if (((bitField0_ & 0x00000010) == 0x00000010)) {
12664         output.writeBool(5, filterIfMissing_);
12665       }
12666       if (((bitField0_ & 0x00000020) == 0x00000020)) {
12667         output.writeBool(6, latestVersionOnly_);
12668       }
12669       getUnknownFields().writeTo(output);
12670     }
12671 
12672     private int memoizedSerializedSize = -1;
12673     public int getSerializedSize() {
12674       int size = memoizedSerializedSize;
12675       if (size != -1) return size;
12676 
12677       size = 0;
12678       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12679         size += com.google.protobuf.CodedOutputStream
12680           .computeBytesSize(1, columnFamily_);
12681       }
12682       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12683         size += com.google.protobuf.CodedOutputStream
12684           .computeBytesSize(2, columnQualifier_);
12685       }
12686       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12687         size += com.google.protobuf.CodedOutputStream
12688           .computeEnumSize(3, compareOp_.getNumber());
12689       }
12690       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12691         size += com.google.protobuf.CodedOutputStream
12692           .computeMessageSize(4, comparator_);
12693       }
12694       if (((bitField0_ & 0x00000010) == 0x00000010)) {
12695         size += com.google.protobuf.CodedOutputStream
12696           .computeBoolSize(5, filterIfMissing_);
12697       }
12698       if (((bitField0_ & 0x00000020) == 0x00000020)) {
12699         size += com.google.protobuf.CodedOutputStream
12700           .computeBoolSize(6, latestVersionOnly_);
12701       }
12702       size += getUnknownFields().getSerializedSize();
12703       memoizedSerializedSize = size;
12704       return size;
12705     }
12706 
12707     private static final long serialVersionUID = 0L;
12708     @java.lang.Override
12709     protected java.lang.Object writeReplace()
12710         throws java.io.ObjectStreamException {
12711       return super.writeReplace();
12712     }
12713 
12714     @java.lang.Override
12715     public boolean equals(final java.lang.Object obj) {
12716       if (obj == this) {
12717        return true;
12718       }
12719       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)) {
12720         return super.equals(obj);
12721       }
12722       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) obj;
12723 
12724       boolean result = true;
12725       result = result && (hasColumnFamily() == other.hasColumnFamily());
12726       if (hasColumnFamily()) {
12727         result = result && getColumnFamily()
12728             .equals(other.getColumnFamily());
12729       }
12730       result = result && (hasColumnQualifier() == other.hasColumnQualifier());
12731       if (hasColumnQualifier()) {
12732         result = result && getColumnQualifier()
12733             .equals(other.getColumnQualifier());
12734       }
12735       result = result && (hasCompareOp() == other.hasCompareOp());
12736       if (hasCompareOp()) {
12737         result = result &&
12738             (getCompareOp() == other.getCompareOp());
12739       }
12740       result = result && (hasComparator() == other.hasComparator());
12741       if (hasComparator()) {
12742         result = result && getComparator()
12743             .equals(other.getComparator());
12744       }
12745       result = result && (hasFilterIfMissing() == other.hasFilterIfMissing());
12746       if (hasFilterIfMissing()) {
12747         result = result && (getFilterIfMissing()
12748             == other.getFilterIfMissing());
12749       }
12750       result = result && (hasLatestVersionOnly() == other.hasLatestVersionOnly());
12751       if (hasLatestVersionOnly()) {
12752         result = result && (getLatestVersionOnly()
12753             == other.getLatestVersionOnly());
12754       }
12755       result = result &&
12756           getUnknownFields().equals(other.getUnknownFields());
12757       return result;
12758     }
12759 
12760     private int memoizedHashCode = 0;
12761     @java.lang.Override
12762     public int hashCode() {
12763       if (memoizedHashCode != 0) {
12764         return memoizedHashCode;
12765       }
12766       int hash = 41;
12767       hash = (19 * hash) + getDescriptorForType().hashCode();
12768       if (hasColumnFamily()) {
12769         hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER;
12770         hash = (53 * hash) + getColumnFamily().hashCode();
12771       }
12772       if (hasColumnQualifier()) {
12773         hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER;
12774         hash = (53 * hash) + getColumnQualifier().hashCode();
12775       }
12776       if (hasCompareOp()) {
12777         hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER;
12778         hash = (53 * hash) + hashEnum(getCompareOp());
12779       }
12780       if (hasComparator()) {
12781         hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
12782         hash = (53 * hash) + getComparator().hashCode();
12783       }
12784       if (hasFilterIfMissing()) {
12785         hash = (37 * hash) + FILTER_IF_MISSING_FIELD_NUMBER;
12786         hash = (53 * hash) + hashBoolean(getFilterIfMissing());
12787       }
12788       if (hasLatestVersionOnly()) {
12789         hash = (37 * hash) + LATEST_VERSION_ONLY_FIELD_NUMBER;
12790         hash = (53 * hash) + hashBoolean(getLatestVersionOnly());
12791       }
12792       hash = (29 * hash) + getUnknownFields().hashCode();
12793       memoizedHashCode = hash;
12794       return hash;
12795     }
12796 
12797     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
12798         com.google.protobuf.ByteString data)
12799         throws com.google.protobuf.InvalidProtocolBufferException {
12800       return PARSER.parseFrom(data);
12801     }
12802     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
12803         com.google.protobuf.ByteString data,
12804         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12805         throws com.google.protobuf.InvalidProtocolBufferException {
12806       return PARSER.parseFrom(data, extensionRegistry);
12807     }
12808     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(byte[] data)
12809         throws com.google.protobuf.InvalidProtocolBufferException {
12810       return PARSER.parseFrom(data);
12811     }
12812     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
12813         byte[] data,
12814         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12815         throws com.google.protobuf.InvalidProtocolBufferException {
12816       return PARSER.parseFrom(data, extensionRegistry);
12817     }
12818     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(java.io.InputStream input)
12819         throws java.io.IOException {
12820       return PARSER.parseFrom(input);
12821     }
12822     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
12823         java.io.InputStream input,
12824         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12825         throws java.io.IOException {
12826       return PARSER.parseFrom(input, extensionRegistry);
12827     }
12828     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(java.io.InputStream input)
12829         throws java.io.IOException {
12830       return PARSER.parseDelimitedFrom(input);
12831     }
12832     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseDelimitedFrom(
12833         java.io.InputStream input,
12834         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12835         throws java.io.IOException {
12836       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12837     }
12838     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
12839         com.google.protobuf.CodedInputStream input)
12840         throws java.io.IOException {
12841       return PARSER.parseFrom(input);
12842     }
12843     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parseFrom(
12844         com.google.protobuf.CodedInputStream input,
12845         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12846         throws java.io.IOException {
12847       return PARSER.parseFrom(input, extensionRegistry);
12848     }
12849 
12850     public static Builder newBuilder() { return Builder.create(); }
12851     public Builder newBuilderForType() { return newBuilder(); }
12852     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter prototype) {
12853       return newBuilder().mergeFrom(prototype);
12854     }
12855     public Builder toBuilder() { return newBuilder(this); }
12856 
12857     @java.lang.Override
12858     protected Builder newBuilderForType(
12859         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12860       Builder builder = new Builder(parent);
12861       return builder;
12862     }
12863     /**
12864      * Protobuf type {@code SingleColumnValueFilter}
12865      */
12866     public static final class Builder extends
12867         com.google.protobuf.GeneratedMessage.Builder<Builder>
12868        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilterOrBuilder {
12869       public static final com.google.protobuf.Descriptors.Descriptor
12870           getDescriptor() {
12871         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor;
12872       }
12873 
12874       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
12875           internalGetFieldAccessorTable() {
12876         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_fieldAccessorTable
12877             .ensureFieldAccessorsInitialized(
12878                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.Builder.class);
12879       }
12880 
12881       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.newBuilder()
12882       private Builder() {
12883         maybeForceBuilderInitialization();
12884       }
12885 
12886       private Builder(
12887           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12888         super(parent);
12889         maybeForceBuilderInitialization();
12890       }
12891       private void maybeForceBuilderInitialization() {
12892         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12893           getComparatorFieldBuilder();
12894         }
12895       }
12896       private static Builder create() {
12897         return new Builder();
12898       }
12899 
12900       public Builder clear() {
12901         super.clear();
12902         columnFamily_ = com.google.protobuf.ByteString.EMPTY;
12903         bitField0_ = (bitField0_ & ~0x00000001);
12904         columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
12905         bitField0_ = (bitField0_ & ~0x00000002);
12906         compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
12907         bitField0_ = (bitField0_ & ~0x00000004);
12908         if (comparatorBuilder_ == null) {
12909           comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
12910         } else {
12911           comparatorBuilder_.clear();
12912         }
12913         bitField0_ = (bitField0_ & ~0x00000008);
12914         filterIfMissing_ = false;
12915         bitField0_ = (bitField0_ & ~0x00000010);
12916         latestVersionOnly_ = false;
12917         bitField0_ = (bitField0_ & ~0x00000020);
12918         return this;
12919       }
12920 
12921       public Builder clone() {
12922         return create().mergeFrom(buildPartial());
12923       }
12924 
12925       public com.google.protobuf.Descriptors.Descriptor
12926           getDescriptorForType() {
12927         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SingleColumnValueFilter_descriptor;
12928       }
12929 
12930       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter getDefaultInstanceForType() {
12931         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance();
12932       }
12933 
12934       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter build() {
12935         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = buildPartial();
12936         if (!result.isInitialized()) {
12937           throw newUninitializedMessageException(result);
12938         }
12939         return result;
12940       }
12941 
12942       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter buildPartial() {
12943         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter(this);
12944         int from_bitField0_ = bitField0_;
12945         int to_bitField0_ = 0;
12946         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
12947           to_bitField0_ |= 0x00000001;
12948         }
12949         result.columnFamily_ = columnFamily_;
12950         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
12951           to_bitField0_ |= 0x00000002;
12952         }
12953         result.columnQualifier_ = columnQualifier_;
12954         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
12955           to_bitField0_ |= 0x00000004;
12956         }
12957         result.compareOp_ = compareOp_;
12958         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
12959           to_bitField0_ |= 0x00000008;
12960         }
12961         if (comparatorBuilder_ == null) {
12962           result.comparator_ = comparator_;
12963         } else {
12964           result.comparator_ = comparatorBuilder_.build();
12965         }
12966         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
12967           to_bitField0_ |= 0x00000010;
12968         }
12969         result.filterIfMissing_ = filterIfMissing_;
12970         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
12971           to_bitField0_ |= 0x00000020;
12972         }
12973         result.latestVersionOnly_ = latestVersionOnly_;
12974         result.bitField0_ = to_bitField0_;
12975         onBuilt();
12976         return result;
12977       }
12978 
12979       public Builder mergeFrom(com.google.protobuf.Message other) {
12980         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) {
12981           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter)other);
12982         } else {
12983           super.mergeFrom(other);
12984           return this;
12985         }
12986       }
12987 
12988       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter other) {
12989         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter.getDefaultInstance()) return this;
12990         if (other.hasColumnFamily()) {
12991           setColumnFamily(other.getColumnFamily());
12992         }
12993         if (other.hasColumnQualifier()) {
12994           setColumnQualifier(other.getColumnQualifier());
12995         }
12996         if (other.hasCompareOp()) {
12997           setCompareOp(other.getCompareOp());
12998         }
12999         if (other.hasComparator()) {
13000           mergeComparator(other.getComparator());
13001         }
13002         if (other.hasFilterIfMissing()) {
13003           setFilterIfMissing(other.getFilterIfMissing());
13004         }
13005         if (other.hasLatestVersionOnly()) {
13006           setLatestVersionOnly(other.getLatestVersionOnly());
13007         }
13008         this.mergeUnknownFields(other.getUnknownFields());
13009         return this;
13010       }
13011 
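      // Merge semantics sketch, assuming two hypothetical builders "a" and "b": bytes,
      // enum, and bool fields set on the other message overwrite this builder's values,
      // while a set comparator is combined field-by-field through mergeComparator.
      //
      //   SingleColumnValueFilter merged = a.mergeFrom(b.buildPartial()).buildPartial();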
13012       public final boolean isInitialized() {
13013         if (!hasCompareOp()) {
13014 
13015           return false;
13016         }
13017         if (!hasComparator()) {
13018 
13019           return false;
13020         }
13021         if (!getComparator().isInitialized()) {
13022 
13023           return false;
13024         }
13025         return true;
13026       }
13027 
13028       public Builder mergeFrom(
13029           com.google.protobuf.CodedInputStream input,
13030           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13031           throws java.io.IOException {
13032         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter parsedMessage = null;
13033         try {
13034           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13035         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13036           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SingleColumnValueFilter) e.getUnfinishedMessage();
13037           throw e;
13038         } finally {
13039           if (parsedMessage != null) {
13040             mergeFrom(parsedMessage);
13041           }
13042         }
13043         return this;
13044       }
13045       private int bitField0_;
13046 
13047       // optional bytes column_family = 1;
13048       private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY;
13049       /**
13050        * <code>optional bytes column_family = 1;</code>
13051        */
hasColumnFamily()13052       public boolean hasColumnFamily() {
13053         return ((bitField0_ & 0x00000001) == 0x00000001);
13054       }
13055       /**
13056        * <code>optional bytes column_family = 1;</code>
13057        */
getColumnFamily()13058       public com.google.protobuf.ByteString getColumnFamily() {
13059         return columnFamily_;
13060       }
13061       /**
13062        * <code>optional bytes column_family = 1;</code>
13063        */
setColumnFamily(com.google.protobuf.ByteString value)13064       public Builder setColumnFamily(com.google.protobuf.ByteString value) {
13065         if (value == null) {
13066           throw new NullPointerException();
13067         }
13068         bitField0_ |= 0x00000001;
13069         columnFamily_ = value;
13070         onChanged();
13071         return this;
13072       }
13073       /**
13074        * <code>optional bytes column_family = 1;</code>
13075        */
clearColumnFamily()13076       public Builder clearColumnFamily() {
13077         bitField0_ = (bitField0_ & ~0x00000001);
13078         columnFamily_ = getDefaultInstance().getColumnFamily();
13079         onChanged();
13080         return this;
13081       }
13082 
13083       // optional bytes column_qualifier = 2;
13084       private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY;
13085       /**
13086        * <code>optional bytes column_qualifier = 2;</code>
13087        */
hasColumnQualifier()13088       public boolean hasColumnQualifier() {
13089         return ((bitField0_ & 0x00000002) == 0x00000002);
13090       }
13091       /**
13092        * <code>optional bytes column_qualifier = 2;</code>
13093        */
getColumnQualifier()13094       public com.google.protobuf.ByteString getColumnQualifier() {
13095         return columnQualifier_;
13096       }
13097       /**
13098        * <code>optional bytes column_qualifier = 2;</code>
13099        */
setColumnQualifier(com.google.protobuf.ByteString value)13100       public Builder setColumnQualifier(com.google.protobuf.ByteString value) {
13101         if (value == null) {
13102           throw new NullPointerException();
13103         }
13104         bitField0_ |= 0x00000002;
13105         columnQualifier_ = value;
13106         onChanged();
13107         return this;
13108       }
13109       /**
13110        * <code>optional bytes column_qualifier = 2;</code>
13111        */
clearColumnQualifier()13112       public Builder clearColumnQualifier() {
13113         bitField0_ = (bitField0_ & ~0x00000002);
13114         columnQualifier_ = getDefaultInstance().getColumnQualifier();
13115         onChanged();
13116         return this;
13117       }
13118 
13119       // required .CompareType compare_op = 3;
13120       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
13121       /**
13122        * <code>required .CompareType compare_op = 3;</code>
13123        */
hasCompareOp()13124       public boolean hasCompareOp() {
13125         return ((bitField0_ & 0x00000004) == 0x00000004);
13126       }
13127       /**
13128        * <code>required .CompareType compare_op = 3;</code>
13129        */
getCompareOp()13130       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() {
13131         return compareOp_;
13132       }
13133       /**
13134        * <code>required .CompareType compare_op = 3;</code>
13135        */
setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value)13136       public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
13137         if (value == null) {
13138           throw new NullPointerException();
13139         }
13140         bitField0_ |= 0x00000004;
13141         compareOp_ = value;
13142         onChanged();
13143         return this;
13144       }
13145       /**
13146        * <code>required .CompareType compare_op = 3;</code>
13147        */
clearCompareOp()13148       public Builder clearCompareOp() {
13149         bitField0_ = (bitField0_ & ~0x00000004);
13150         compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
13151         onChanged();
13152         return this;
13153       }
13154 
13155       // required .Comparator comparator = 4;
13156       private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
13157       private com.google.protobuf.SingleFieldBuilder<
13158           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
13159       /**
13160        * <code>required .Comparator comparator = 4;</code>
13161        */
hasComparator()13162       public boolean hasComparator() {
13163         return ((bitField0_ & 0x00000008) == 0x00000008);
13164       }
13165       /**
13166        * <code>required .Comparator comparator = 4;</code>
13167        */
getComparator()13168       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
13169         if (comparatorBuilder_ == null) {
13170           return comparator_;
13171         } else {
13172           return comparatorBuilder_.getMessage();
13173         }
13174       }
13175       /**
13176        * <code>required .Comparator comparator = 4;</code>
13177        */
setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value)13178       public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
13179         if (comparatorBuilder_ == null) {
13180           if (value == null) {
13181             throw new NullPointerException();
13182           }
13183           comparator_ = value;
13184           onChanged();
13185         } else {
13186           comparatorBuilder_.setMessage(value);
13187         }
13188         bitField0_ |= 0x00000008;
13189         return this;
13190       }
13191       /**
13192        * <code>required .Comparator comparator = 4;</code>
13193        */
setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue)13194       public Builder setComparator(
13195           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
13196         if (comparatorBuilder_ == null) {
13197           comparator_ = builderForValue.build();
13198           onChanged();
13199         } else {
13200           comparatorBuilder_.setMessage(builderForValue.build());
13201         }
13202         bitField0_ |= 0x00000008;
13203         return this;
13204       }
13205       /**
13206        * <code>required .Comparator comparator = 4;</code>
13207        */
mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value)13208       public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
13209         if (comparatorBuilder_ == null) {
13210           if (((bitField0_ & 0x00000008) == 0x00000008) &&
13211               comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
13212             comparator_ =
13213               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
13214           } else {
13215             comparator_ = value;
13216           }
13217           onChanged();
13218         } else {
13219           comparatorBuilder_.mergeFrom(value);
13220         }
13221         bitField0_ |= 0x00000008;
13222         return this;
13223       }
13224       /**
13225        * <code>required .Comparator comparator = 4;</code>
13226        */
clearComparator()13227       public Builder clearComparator() {
13228         if (comparatorBuilder_ == null) {
13229           comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
13230           onChanged();
13231         } else {
13232           comparatorBuilder_.clear();
13233         }
13234         bitField0_ = (bitField0_ & ~0x00000008);
13235         return this;
13236       }
13237       /**
13238        * <code>required .Comparator comparator = 4;</code>
13239        */
getComparatorBuilder()13240       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
13241         bitField0_ |= 0x00000008;
13242         onChanged();
13243         return getComparatorFieldBuilder().getBuilder();
13244       }
13245       /**
13246        * <code>required .Comparator comparator = 4;</code>
13247        */
getComparatorOrBuilder()13248       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
13249         if (comparatorBuilder_ != null) {
13250           return comparatorBuilder_.getMessageOrBuilder();
13251         } else {
13252           return comparator_;
13253         }
13254       }
13255       /**
13256        * <code>required .Comparator comparator = 4;</code>
13257        */
13258       private com.google.protobuf.SingleFieldBuilder<
13259           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
getComparatorFieldBuilder()13260           getComparatorFieldBuilder() {
13261         if (comparatorBuilder_ == null) {
13262           comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13263               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
13264                   comparator_,
13265                   getParentForChildren(),
13266                   isClean());
13267           comparator_ = null;
13268         }
13269         return comparatorBuilder_;
13270       }
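      // The comparator field is held directly in comparator_ until a nested builder
      // is requested; once getComparatorFieldBuilder() above creates the
      // SingleFieldBuilder, comparator_ is set to null and every accessor routes
      // reads and writes through comparatorBuilder_ instead.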
13271 
13272       // optional bool filter_if_missing = 5;
13273       private boolean filterIfMissing_ ;
13274       /**
13275        * <code>optional bool filter_if_missing = 5;</code>
13276        */
hasFilterIfMissing()13277       public boolean hasFilterIfMissing() {
13278         return ((bitField0_ & 0x00000010) == 0x00000010);
13279       }
13280       /**
13281        * <code>optional bool filter_if_missing = 5;</code>
13282        */
getFilterIfMissing()13283       public boolean getFilterIfMissing() {
13284         return filterIfMissing_;
13285       }
13286       /**
13287        * <code>optional bool filter_if_missing = 5;</code>
13288        */
setFilterIfMissing(boolean value)13289       public Builder setFilterIfMissing(boolean value) {
13290         bitField0_ |= 0x00000010;
13291         filterIfMissing_ = value;
13292         onChanged();
13293         return this;
13294       }
13295       /**
13296        * <code>optional bool filter_if_missing = 5;</code>
13297        */
clearFilterIfMissing()13298       public Builder clearFilterIfMissing() {
13299         bitField0_ = (bitField0_ & ~0x00000010);
13300         filterIfMissing_ = false;
13301         onChanged();
13302         return this;
13303       }
13304 
13305       // optional bool latest_version_only = 6;
13306       private boolean latestVersionOnly_ ;
13307       /**
13308        * <code>optional bool latest_version_only = 6;</code>
13309        */
hasLatestVersionOnly()13310       public boolean hasLatestVersionOnly() {
13311         return ((bitField0_ & 0x00000020) == 0x00000020);
13312       }
13313       /**
13314        * <code>optional bool latest_version_only = 6;</code>
13315        */
getLatestVersionOnly()13316       public boolean getLatestVersionOnly() {
13317         return latestVersionOnly_;
13318       }
13319       /**
13320        * <code>optional bool latest_version_only = 6;</code>
13321        */
setLatestVersionOnly(boolean value)13322       public Builder setLatestVersionOnly(boolean value) {
13323         bitField0_ |= 0x00000020;
13324         latestVersionOnly_ = value;
13325         onChanged();
13326         return this;
13327       }
13328       /**
13329        * <code>optional bool latest_version_only = 6;</code>
13330        */
clearLatestVersionOnly()13331       public Builder clearLatestVersionOnly() {
13332         bitField0_ = (bitField0_ & ~0x00000020);
13333         latestVersionOnly_ = false;
13334         onChanged();
13335         return this;
13336       }
13337 
13338       // @@protoc_insertion_point(builder_scope:SingleColumnValueFilter)
13339     }
13340 
13341     static {
13342       defaultInstance = new SingleColumnValueFilter(true);
defaultInstance.initFields()13343       defaultInstance.initFields();
13344     }
13345 
13346     // @@protoc_insertion_point(class_scope:SingleColumnValueFilter)
13347   }
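
  // A minimal usage sketch (illustrative only, not produced by protoc) of how a
  // SingleColumnValueFilter is assembled with the builder above. The Comparator
  // value is assumed to come from the separately generated ComparatorProtos file,
  // and CompareType.EQUAL is assumed to exist alongside the LESS default used above.
  //
  //   ComparatorProtos.Comparator comparator = ...; // built elsewhere
  //   FilterProtos.SingleColumnValueFilter scvf =
  //       FilterProtos.SingleColumnValueFilter.newBuilder()
  //           .setColumnFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
  //           .setColumnQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
  //           .setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.EQUAL) // required
  //           .setComparator(comparator)                                                              // required
  //           .setFilterIfMissing(true)
  //           .setLatestVersionOnly(true)
  //           .build(); // throws if a required field is missing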
13348 
13349   public interface SkipFilterOrBuilder
13350       extends com.google.protobuf.MessageOrBuilder {
13351 
13352     // required .Filter filter = 1;
13353     /**
13354      * <code>required .Filter filter = 1;</code>
13355      */
hasFilter()13356     boolean hasFilter();
13357     /**
13358      * <code>required .Filter filter = 1;</code>
13359      */
getFilter()13360     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
13361     /**
13362      * <code>required .Filter filter = 1;</code>
13363      */
getFilterOrBuilder()13364     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
13365   }
13366   /**
13367    * Protobuf type {@code SkipFilter}
13368    */
13369   public static final class SkipFilter extends
13370       com.google.protobuf.GeneratedMessage
13371       implements SkipFilterOrBuilder {
13372     // Use SkipFilter.newBuilder() to construct.
SkipFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)13373     private SkipFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13374       super(builder);
13375       this.unknownFields = builder.getUnknownFields();
13376     }
SkipFilter(boolean noInit)13377     private SkipFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13378 
13379     private static final SkipFilter defaultInstance;
getDefaultInstance()13380     public static SkipFilter getDefaultInstance() {
13381       return defaultInstance;
13382     }
13383 
getDefaultInstanceForType()13384     public SkipFilter getDefaultInstanceForType() {
13385       return defaultInstance;
13386     }
13387 
13388     private final com.google.protobuf.UnknownFieldSet unknownFields;
13389     @java.lang.Override
13390     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()13391         getUnknownFields() {
13392       return this.unknownFields;
13393     }
SkipFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13394     private SkipFilter(
13395         com.google.protobuf.CodedInputStream input,
13396         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13397         throws com.google.protobuf.InvalidProtocolBufferException {
13398       initFields();
13399       int mutable_bitField0_ = 0;
13400       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13401           com.google.protobuf.UnknownFieldSet.newBuilder();
13402       try {
13403         boolean done = false;
13404         while (!done) {
13405           int tag = input.readTag();
13406           switch (tag) {
13407             case 0:
13408               done = true;
13409               break;
13410             default: {
13411               if (!parseUnknownField(input, unknownFields,
13412                                      extensionRegistry, tag)) {
13413                 done = true;
13414               }
13415               break;
13416             }
13417             case 10: {
13418               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
13419               if (((bitField0_ & 0x00000001) == 0x00000001)) {
13420                 subBuilder = filter_.toBuilder();
13421               }
13422               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
13423               if (subBuilder != null) {
13424                 subBuilder.mergeFrom(filter_);
13425                 filter_ = subBuilder.buildPartial();
13426               }
13427               bitField0_ |= 0x00000001;
13428               break;
13429             }
13430           }
13431         }
13432       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13433         throw e.setUnfinishedMessage(this);
13434       } catch (java.io.IOException e) {
13435         throw new com.google.protobuf.InvalidProtocolBufferException(
13436             e.getMessage()).setUnfinishedMessage(this);
13437       } finally {
13438         this.unknownFields = unknownFields.build();
13439         makeExtensionsImmutable();
13440       }
13441     }
13442     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()13443         getDescriptor() {
13444       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor;
13445     }
13446 
13447     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()13448         internalGetFieldAccessorTable() {
13449       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable
13450           .ensureFieldAccessorsInitialized(
13451               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class);
13452     }
13453 
13454     public static com.google.protobuf.Parser<SkipFilter> PARSER =
13455         new com.google.protobuf.AbstractParser<SkipFilter>() {
13456       public SkipFilter parsePartialFrom(
13457           com.google.protobuf.CodedInputStream input,
13458           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13459           throws com.google.protobuf.InvalidProtocolBufferException {
13460         return new SkipFilter(input, extensionRegistry);
13461       }
13462     };
13463 
13464     @java.lang.Override
getParserForType()13465     public com.google.protobuf.Parser<SkipFilter> getParserForType() {
13466       return PARSER;
13467     }
13468 
13469     private int bitField0_;
13470     // required .Filter filter = 1;
13471     public static final int FILTER_FIELD_NUMBER = 1;
13472     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
13473     /**
13474      * <code>required .Filter filter = 1;</code>
13475      */
hasFilter()13476     public boolean hasFilter() {
13477       return ((bitField0_ & 0x00000001) == 0x00000001);
13478     }
13479     /**
13480      * <code>required .Filter filter = 1;</code>
13481      */
getFilter()13482     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
13483       return filter_;
13484     }
13485     /**
13486      * <code>required .Filter filter = 1;</code>
13487      */
getFilterOrBuilder()13488     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
13489       return filter_;
13490     }
13491 
initFields()13492     private void initFields() {
13493       filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
13494     }
13495     private byte memoizedIsInitialized = -1;
isInitialized()13496     public final boolean isInitialized() {
13497       byte isInitialized = memoizedIsInitialized;
13498       if (isInitialized != -1) return isInitialized == 1;
13499 
13500       if (!hasFilter()) {
13501         memoizedIsInitialized = 0;
13502         return false;
13503       }
13504       if (!getFilter().isInitialized()) {
13505         memoizedIsInitialized = 0;
13506         return false;
13507       }
13508       memoizedIsInitialized = 1;
13509       return true;
13510     }
13511 
writeTo(com.google.protobuf.CodedOutputStream output)13512     public void writeTo(com.google.protobuf.CodedOutputStream output)
13513                         throws java.io.IOException {
13514       getSerializedSize();
13515       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13516         output.writeMessage(1, filter_);
13517       }
13518       getUnknownFields().writeTo(output);
13519     }
13520 
13521     private int memoizedSerializedSize = -1;
getSerializedSize()13522     public int getSerializedSize() {
13523       int size = memoizedSerializedSize;
13524       if (size != -1) return size;
13525 
13526       size = 0;
13527       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13528         size += com.google.protobuf.CodedOutputStream
13529           .computeMessageSize(1, filter_);
13530       }
13531       size += getUnknownFields().getSerializedSize();
13532       memoizedSerializedSize = size;
13533       return size;
13534     }
13535 
13536     private static final long serialVersionUID = 0L;
13537     @java.lang.Override
writeReplace()13538     protected java.lang.Object writeReplace()
13539         throws java.io.ObjectStreamException {
13540       return super.writeReplace();
13541     }
13542 
13543     @java.lang.Override
equals(final java.lang.Object obj)13544     public boolean equals(final java.lang.Object obj) {
13545       if (obj == this) {
13546        return true;
13547       }
13548       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)) {
13549         return super.equals(obj);
13550       }
13551       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) obj;
13552 
13553       boolean result = true;
13554       result = result && (hasFilter() == other.hasFilter());
13555       if (hasFilter()) {
13556         result = result && getFilter()
13557             .equals(other.getFilter());
13558       }
13559       result = result &&
13560           getUnknownFields().equals(other.getUnknownFields());
13561       return result;
13562     }
13563 
13564     private int memoizedHashCode = 0;
13565     @java.lang.Override
hashCode()13566     public int hashCode() {
13567       if (memoizedHashCode != 0) {
13568         return memoizedHashCode;
13569       }
13570       int hash = 41;
13571       hash = (19 * hash) + getDescriptorForType().hashCode();
13572       if (hasFilter()) {
13573         hash = (37 * hash) + FILTER_FIELD_NUMBER;
13574         hash = (53 * hash) + getFilter().hashCode();
13575       }
13576       hash = (29 * hash) + getUnknownFields().hashCode();
13577       memoizedHashCode = hash;
13578       return hash;
13579     }
13580 
parseFrom( com.google.protobuf.ByteString data)13581     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
13582         com.google.protobuf.ByteString data)
13583         throws com.google.protobuf.InvalidProtocolBufferException {
13584       return PARSER.parseFrom(data);
13585     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13586     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
13587         com.google.protobuf.ByteString data,
13588         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13589         throws com.google.protobuf.InvalidProtocolBufferException {
13590       return PARSER.parseFrom(data, extensionRegistry);
13591     }
parseFrom(byte[] data)13592     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(byte[] data)
13593         throws com.google.protobuf.InvalidProtocolBufferException {
13594       return PARSER.parseFrom(data);
13595     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13596     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
13597         byte[] data,
13598         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13599         throws com.google.protobuf.InvalidProtocolBufferException {
13600       return PARSER.parseFrom(data, extensionRegistry);
13601     }
parseFrom(java.io.InputStream input)13602     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(java.io.InputStream input)
13603         throws java.io.IOException {
13604       return PARSER.parseFrom(input);
13605     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13606     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
13607         java.io.InputStream input,
13608         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13609         throws java.io.IOException {
13610       return PARSER.parseFrom(input, extensionRegistry);
13611     }
parseDelimitedFrom(java.io.InputStream input)13612     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(java.io.InputStream input)
13613         throws java.io.IOException {
13614       return PARSER.parseDelimitedFrom(input);
13615     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13616     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseDelimitedFrom(
13617         java.io.InputStream input,
13618         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13619         throws java.io.IOException {
13620       return PARSER.parseDelimitedFrom(input, extensionRegistry);
13621     }
parseFrom( com.google.protobuf.CodedInputStream input)13622     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
13623         com.google.protobuf.CodedInputStream input)
13624         throws java.io.IOException {
13625       return PARSER.parseFrom(input);
13626     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13627     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parseFrom(
13628         com.google.protobuf.CodedInputStream input,
13629         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13630         throws java.io.IOException {
13631       return PARSER.parseFrom(input, extensionRegistry);
13632     }
13633 
newBuilder()13634     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()13635     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype)13636     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter prototype) {
13637       return newBuilder().mergeFrom(prototype);
13638     }
toBuilder()13639     public Builder toBuilder() { return newBuilder(this); }
13640 
13641     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)13642     protected Builder newBuilderForType(
13643         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13644       Builder builder = new Builder(parent);
13645       return builder;
13646     }
13647     /**
13648      * Protobuf type {@code SkipFilter}
13649      */
13650     public static final class Builder extends
13651         com.google.protobuf.GeneratedMessage.Builder<Builder>
13652        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilterOrBuilder {
13653       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()13654           getDescriptor() {
13655         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor;
13656       }
13657 
13658       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()13659           internalGetFieldAccessorTable() {
13660         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_fieldAccessorTable
13661             .ensureFieldAccessorsInitialized(
13662                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.Builder.class);
13663       }
13664 
13665       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.newBuilder()
Builder()13666       private Builder() {
13667         maybeForceBuilderInitialization();
13668       }
13669 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)13670       private Builder(
13671           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13672         super(parent);
13673         maybeForceBuilderInitialization();
13674       }
maybeForceBuilderInitialization()13675       private void maybeForceBuilderInitialization() {
13676         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13677           getFilterFieldBuilder();
13678         }
13679       }
create()13680       private static Builder create() {
13681         return new Builder();
13682       }
13683 
clear()13684       public Builder clear() {
13685         super.clear();
13686         if (filterBuilder_ == null) {
13687           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
13688         } else {
13689           filterBuilder_.clear();
13690         }
13691         bitField0_ = (bitField0_ & ~0x00000001);
13692         return this;
13693       }
13694 
clone()13695       public Builder clone() {
13696         return create().mergeFrom(buildPartial());
13697       }
13698 
13699       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()13700           getDescriptorForType() {
13701         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_SkipFilter_descriptor;
13702       }
13703 
getDefaultInstanceForType()13704       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter getDefaultInstanceForType() {
13705         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance();
13706       }
13707 
build()13708       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter build() {
13709         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = buildPartial();
13710         if (!result.isInitialized()) {
13711           throw newUninitializedMessageException(result);
13712         }
13713         return result;
13714       }
13715 
buildPartial()13716       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter buildPartial() {
13717         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter(this);
13718         int from_bitField0_ = bitField0_;
13719         int to_bitField0_ = 0;
13720         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13721           to_bitField0_ |= 0x00000001;
13722         }
13723         if (filterBuilder_ == null) {
13724           result.filter_ = filter_;
13725         } else {
13726           result.filter_ = filterBuilder_.build();
13727         }
13728         result.bitField0_ = to_bitField0_;
13729         onBuilt();
13730         return result;
13731       }
13732 
mergeFrom(com.google.protobuf.Message other)13733       public Builder mergeFrom(com.google.protobuf.Message other) {
13734         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) {
13735           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter)other);
13736         } else {
13737           super.mergeFrom(other);
13738           return this;
13739         }
13740       }
13741 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other)13742       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter other) {
13743         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter.getDefaultInstance()) return this;
13744         if (other.hasFilter()) {
13745           mergeFilter(other.getFilter());
13746         }
13747         this.mergeUnknownFields(other.getUnknownFields());
13748         return this;
13749       }
13750 
isInitialized()13751       public final boolean isInitialized() {
13752         if (!hasFilter()) {
13753 
13754           return false;
13755         }
13756         if (!getFilter().isInitialized()) {
13757 
13758           return false;
13759         }
13760         return true;
13761       }
13762 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13763       public Builder mergeFrom(
13764           com.google.protobuf.CodedInputStream input,
13765           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13766           throws java.io.IOException {
13767         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter parsedMessage = null;
13768         try {
13769           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13770         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13771           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.SkipFilter) e.getUnfinishedMessage();
13772           throw e;
13773         } finally {
13774           if (parsedMessage != null) {
13775             mergeFrom(parsedMessage);
13776           }
13777         }
13778         return this;
13779       }
13780       private int bitField0_;
13781 
13782       // required .Filter filter = 1;
13783       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
13784       private com.google.protobuf.SingleFieldBuilder<
13785           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
13786       /**
13787        * <code>required .Filter filter = 1;</code>
13788        */
hasFilter()13789       public boolean hasFilter() {
13790         return ((bitField0_ & 0x00000001) == 0x00000001);
13791       }
13792       /**
13793        * <code>required .Filter filter = 1;</code>
13794        */
getFilter()13795       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
13796         if (filterBuilder_ == null) {
13797           return filter_;
13798         } else {
13799           return filterBuilder_.getMessage();
13800         }
13801       }
13802       /**
13803        * <code>required .Filter filter = 1;</code>
13804        */
setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)13805       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
13806         if (filterBuilder_ == null) {
13807           if (value == null) {
13808             throw new NullPointerException();
13809           }
13810           filter_ = value;
13811           onChanged();
13812         } else {
13813           filterBuilder_.setMessage(value);
13814         }
13815         bitField0_ |= 0x00000001;
13816         return this;
13817       }
13818       /**
13819        * <code>required .Filter filter = 1;</code>
13820        */
setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)13821       public Builder setFilter(
13822           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
13823         if (filterBuilder_ == null) {
13824           filter_ = builderForValue.build();
13825           onChanged();
13826         } else {
13827           filterBuilder_.setMessage(builderForValue.build());
13828         }
13829         bitField0_ |= 0x00000001;
13830         return this;
13831       }
13832       /**
13833        * <code>required .Filter filter = 1;</code>
13834        */
mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)13835       public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
13836         if (filterBuilder_ == null) {
13837           if (((bitField0_ & 0x00000001) == 0x00000001) &&
13838               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
13839             filter_ =
13840               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
13841           } else {
13842             filter_ = value;
13843           }
13844           onChanged();
13845         } else {
13846           filterBuilder_.mergeFrom(value);
13847         }
13848         bitField0_ |= 0x00000001;
13849         return this;
13850       }
13851       /**
13852        * <code>required .Filter filter = 1;</code>
13853        */
clearFilter()13854       public Builder clearFilter() {
13855         if (filterBuilder_ == null) {
13856           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
13857           onChanged();
13858         } else {
13859           filterBuilder_.clear();
13860         }
13861         bitField0_ = (bitField0_ & ~0x00000001);
13862         return this;
13863       }
13864       /**
13865        * <code>required .Filter filter = 1;</code>
13866        */
getFilterBuilder()13867       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
13868         bitField0_ |= 0x00000001;
13869         onChanged();
13870         return getFilterFieldBuilder().getBuilder();
13871       }
13872       /**
13873        * <code>required .Filter filter = 1;</code>
13874        */
getFilterOrBuilder()13875       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
13876         if (filterBuilder_ != null) {
13877           return filterBuilder_.getMessageOrBuilder();
13878         } else {
13879           return filter_;
13880         }
13881       }
13882       /**
13883        * <code>required .Filter filter = 1;</code>
13884        */
13885       private com.google.protobuf.SingleFieldBuilder<
13886           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
getFilterFieldBuilder()13887           getFilterFieldBuilder() {
13888         if (filterBuilder_ == null) {
13889           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13890               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
13891                   filter_,
13892                   getParentForChildren(),
13893                   isClean());
13894           filter_ = null;
13895         }
13896         return filterBuilder_;
13897       }
13898 
13899       // @@protoc_insertion_point(builder_scope:SkipFilter)
13900     }
13901 
13902     static {
13903       defaultInstance = new SkipFilter(true);
defaultInstance.initFields()13904       defaultInstance.initFields();
13905     }
13906 
13907     // @@protoc_insertion_point(class_scope:SkipFilter)
13908   }
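
  // A brief sketch, assuming the generic Filter message (name plus serialized
  // filter bytes) has already been built elsewhere; the SkipFilter above simply
  // embeds it in its single required field:
  //
  //   FilterProtos.Filter wrapped = ...; // previously constructed Filter message
  //   FilterProtos.SkipFilter skip = FilterProtos.SkipFilter.newBuilder()
  //       .setFilter(wrapped)       // required .Filter filter = 1
  //       .build();
  //   byte[] bytes = skip.toByteArray(); // standard GeneratedMessage serialization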
13909 
13910   public interface TimestampsFilterOrBuilder
13911       extends com.google.protobuf.MessageOrBuilder {
13912 
13913     // repeated int64 timestamps = 1 [packed = true];
13914     /**
13915      * <code>repeated int64 timestamps = 1 [packed = true];</code>
13916      */
getTimestampsList()13917     java.util.List<java.lang.Long> getTimestampsList();
13918     /**
13919      * <code>repeated int64 timestamps = 1 [packed = true];</code>
13920      */
getTimestampsCount()13921     int getTimestampsCount();
13922     /**
13923      * <code>repeated int64 timestamps = 1 [packed = true];</code>
13924      */
getTimestamps(int index)13925     long getTimestamps(int index);
13926   }
13927   /**
13928    * Protobuf type {@code TimestampsFilter}
13929    */
13930   public static final class TimestampsFilter extends
13931       com.google.protobuf.GeneratedMessage
13932       implements TimestampsFilterOrBuilder {
13933     // Use TimestampsFilter.newBuilder() to construct.
TimestampsFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)13934     private TimestampsFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13935       super(builder);
13936       this.unknownFields = builder.getUnknownFields();
13937     }
TimestampsFilter(boolean noInit)13938     private TimestampsFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13939 
13940     private static final TimestampsFilter defaultInstance;
getDefaultInstance()13941     public static TimestampsFilter getDefaultInstance() {
13942       return defaultInstance;
13943     }
13944 
getDefaultInstanceForType()13945     public TimestampsFilter getDefaultInstanceForType() {
13946       return defaultInstance;
13947     }
13948 
13949     private final com.google.protobuf.UnknownFieldSet unknownFields;
13950     @java.lang.Override
13951     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()13952         getUnknownFields() {
13953       return this.unknownFields;
13954     }
TimestampsFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13955     private TimestampsFilter(
13956         com.google.protobuf.CodedInputStream input,
13957         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13958         throws com.google.protobuf.InvalidProtocolBufferException {
13959       initFields();
13960       int mutable_bitField0_ = 0;
13961       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13962           com.google.protobuf.UnknownFieldSet.newBuilder();
13963       try {
13964         boolean done = false;
13965         while (!done) {
13966           int tag = input.readTag();
13967           switch (tag) {
13968             case 0:
13969               done = true;
13970               break;
13971             default: {
13972               if (!parseUnknownField(input, unknownFields,
13973                                      extensionRegistry, tag)) {
13974                 done = true;
13975               }
13976               break;
13977             }
13978             case 8: {
13979               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
13980                 timestamps_ = new java.util.ArrayList<java.lang.Long>();
13981                 mutable_bitField0_ |= 0x00000001;
13982               }
13983               timestamps_.add(input.readInt64());
13984               break;
13985             }
13986             case 10: {
13987               int length = input.readRawVarint32();
13988               int limit = input.pushLimit(length);
13989               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
13990                 timestamps_ = new java.util.ArrayList<java.lang.Long>();
13991                 mutable_bitField0_ |= 0x00000001;
13992               }
13993               while (input.getBytesUntilLimit() > 0) {
13994                 timestamps_.add(input.readInt64());
13995               }
13996               input.popLimit(limit);
13997               break;
13998             }
13999           }
14000         }
14001       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14002         throw e.setUnfinishedMessage(this);
14003       } catch (java.io.IOException e) {
14004         throw new com.google.protobuf.InvalidProtocolBufferException(
14005             e.getMessage()).setUnfinishedMessage(this);
14006       } finally {
14007         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
14008           timestamps_ = java.util.Collections.unmodifiableList(timestamps_);
14009         }
14010         this.unknownFields = unknownFields.build();
14011         makeExtensionsImmutable();
14012       }
14013     }
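    // Although timestamps is declared [packed = true], the constructor above also
    // accepts the unpacked encoding: tag 8 reads one varint per element, while
    // tag 10 reads a length-delimited block and drains it varint by varint,
    // matching the protobuf rule that parsers accept either wire form for
    // packable repeated scalar fields.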
14014     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()14015         getDescriptor() {
14016       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor;
14017     }
14018 
14019     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()14020         internalGetFieldAccessorTable() {
14021       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable
14022           .ensureFieldAccessorsInitialized(
14023               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class);
14024     }
14025 
14026     public static com.google.protobuf.Parser<TimestampsFilter> PARSER =
14027         new com.google.protobuf.AbstractParser<TimestampsFilter>() {
14028       public TimestampsFilter parsePartialFrom(
14029           com.google.protobuf.CodedInputStream input,
14030           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14031           throws com.google.protobuf.InvalidProtocolBufferException {
14032         return new TimestampsFilter(input, extensionRegistry);
14033       }
14034     };
14035 
14036     @java.lang.Override
getParserForType()14037     public com.google.protobuf.Parser<TimestampsFilter> getParserForType() {
14038       return PARSER;
14039     }
14040 
14041     // repeated int64 timestamps = 1 [packed = true];
14042     public static final int TIMESTAMPS_FIELD_NUMBER = 1;
14043     private java.util.List<java.lang.Long> timestamps_;
14044     /**
14045      * <code>repeated int64 timestamps = 1 [packed = true];</code>
14046      */
14047     public java.util.List<java.lang.Long>
getTimestampsList()14048         getTimestampsList() {
14049       return timestamps_;
14050     }
14051     /**
14052      * <code>repeated int64 timestamps = 1 [packed = true];</code>
14053      */
getTimestampsCount()14054     public int getTimestampsCount() {
14055       return timestamps_.size();
14056     }
14057     /**
14058      * <code>repeated int64 timestamps = 1 [packed = true];</code>
14059      */
getTimestamps(int index)14060     public long getTimestamps(int index) {
14061       return timestamps_.get(index);
14062     }
14063     private int timestampsMemoizedSerializedSize = -1;
14064 
initFields()14065     private void initFields() {
14066       timestamps_ = java.util.Collections.emptyList();
14067     }
14068     private byte memoizedIsInitialized = -1;
isInitialized()14069     public final boolean isInitialized() {
14070       byte isInitialized = memoizedIsInitialized;
14071       if (isInitialized != -1) return isInitialized == 1;
14072 
14073       memoizedIsInitialized = 1;
14074       return true;
14075     }
14076 
writeTo(com.google.protobuf.CodedOutputStream output)14077     public void writeTo(com.google.protobuf.CodedOutputStream output)
14078                         throws java.io.IOException {
14079       getSerializedSize();
14080       if (getTimestampsList().size() > 0) {
14081         output.writeRawVarint32(10);
14082         output.writeRawVarint32(timestampsMemoizedSerializedSize);
14083       }
14084       for (int i = 0; i < timestamps_.size(); i++) {
14085         output.writeInt64NoTag(timestamps_.get(i));
14086       }
14087       getUnknownFields().writeTo(output);
14088     }
14089 
14090     private int memoizedSerializedSize = -1;
getSerializedSize()14091     public int getSerializedSize() {
14092       int size = memoizedSerializedSize;
14093       if (size != -1) return size;
14094 
14095       size = 0;
14096       {
14097         int dataSize = 0;
14098         for (int i = 0; i < timestamps_.size(); i++) {
14099           dataSize += com.google.protobuf.CodedOutputStream
14100             .computeInt64SizeNoTag(timestamps_.get(i));
14101         }
14102         size += dataSize;
14103         if (!getTimestampsList().isEmpty()) {
14104           size += 1;
14105           size += com.google.protobuf.CodedOutputStream
14106               .computeInt32SizeNoTag(dataSize);
14107         }
14108         timestampsMemoizedSerializedSize = dataSize;
14109       }
14110       size += getUnknownFields().getSerializedSize();
14111       memoizedSerializedSize = size;
14112       return size;
14113     }
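    // getSerializedSize() accounts for one tag byte plus a varint length prefix for
    // the packed field, and caches the raw payload size in
    // timestampsMemoizedSerializedSize so that writeTo() can emit the length prefix
    // without recomputing it.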
14114 
14115     private static final long serialVersionUID = 0L;
14116     @java.lang.Override
writeReplace()14117     protected java.lang.Object writeReplace()
14118         throws java.io.ObjectStreamException {
14119       return super.writeReplace();
14120     }
14121 
14122     @java.lang.Override
equals(final java.lang.Object obj)14123     public boolean equals(final java.lang.Object obj) {
14124       if (obj == this) {
14125        return true;
14126       }
14127       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)) {
14128         return super.equals(obj);
14129       }
14130       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) obj;
14131 
14132       boolean result = true;
14133       result = result && getTimestampsList()
14134           .equals(other.getTimestampsList());
14135       result = result &&
14136           getUnknownFields().equals(other.getUnknownFields());
14137       return result;
14138     }
14139 
14140     private int memoizedHashCode = 0;
14141     @java.lang.Override
hashCode()14142     public int hashCode() {
14143       if (memoizedHashCode != 0) {
14144         return memoizedHashCode;
14145       }
14146       int hash = 41;
14147       hash = (19 * hash) + getDescriptorForType().hashCode();
14148       if (getTimestampsCount() > 0) {
14149         hash = (37 * hash) + TIMESTAMPS_FIELD_NUMBER;
14150         hash = (53 * hash) + getTimestampsList().hashCode();
14151       }
14152       hash = (29 * hash) + getUnknownFields().hashCode();
14153       memoizedHashCode = hash;
14154       return hash;
14155     }
14156 
parseFrom( com.google.protobuf.ByteString data)14157     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
14158         com.google.protobuf.ByteString data)
14159         throws com.google.protobuf.InvalidProtocolBufferException {
14160       return PARSER.parseFrom(data);
14161     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14162     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
14163         com.google.protobuf.ByteString data,
14164         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14165         throws com.google.protobuf.InvalidProtocolBufferException {
14166       return PARSER.parseFrom(data, extensionRegistry);
14167     }
parseFrom(byte[] data)14168     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(byte[] data)
14169         throws com.google.protobuf.InvalidProtocolBufferException {
14170       return PARSER.parseFrom(data);
14171     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14172     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
14173         byte[] data,
14174         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14175         throws com.google.protobuf.InvalidProtocolBufferException {
14176       return PARSER.parseFrom(data, extensionRegistry);
14177     }
parseFrom(java.io.InputStream input)14178     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(java.io.InputStream input)
14179         throws java.io.IOException {
14180       return PARSER.parseFrom(input);
14181     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14182     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
14183         java.io.InputStream input,
14184         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14185         throws java.io.IOException {
14186       return PARSER.parseFrom(input, extensionRegistry);
14187     }
parseDelimitedFrom(java.io.InputStream input)14188     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(java.io.InputStream input)
14189         throws java.io.IOException {
14190       return PARSER.parseDelimitedFrom(input);
14191     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14192     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseDelimitedFrom(
14193         java.io.InputStream input,
14194         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14195         throws java.io.IOException {
14196       return PARSER.parseDelimitedFrom(input, extensionRegistry);
14197     }
parseFrom( com.google.protobuf.CodedInputStream input)14198     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
14199         com.google.protobuf.CodedInputStream input)
14200         throws java.io.IOException {
14201       return PARSER.parseFrom(input);
14202     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14203     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parseFrom(
14204         com.google.protobuf.CodedInputStream input,
14205         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14206         throws java.io.IOException {
14207       return PARSER.parseFrom(input, extensionRegistry);
14208     }
14209 
newBuilder()14210     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()14211     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype)14212     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter prototype) {
14213       return newBuilder().mergeFrom(prototype);
14214     }
toBuilder()14215     public Builder toBuilder() { return newBuilder(this); }
14216 
14217     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)14218     protected Builder newBuilderForType(
14219         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14220       Builder builder = new Builder(parent);
14221       return builder;
14222     }
14223     /**
14224      * Protobuf type {@code TimestampsFilter}
14225      */
14226     public static final class Builder extends
14227         com.google.protobuf.GeneratedMessage.Builder<Builder>
14228        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilterOrBuilder {
14229       public static final com.google.protobuf.Descriptors.Descriptor
14230           getDescriptor() {
14231         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor;
14232       }
14233 
14234       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14235           internalGetFieldAccessorTable() {
14236         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_fieldAccessorTable
14237             .ensureFieldAccessorsInitialized(
14238                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.Builder.class);
14239       }
14240 
14241       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.newBuilder()
14242       private Builder() {
14243         maybeForceBuilderInitialization();
14244       }
14245 
14246       private Builder(
14247           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14248         super(parent);
14249         maybeForceBuilderInitialization();
14250       }
14251       private void maybeForceBuilderInitialization() {
14252         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
14253         }
14254       }
14255       private static Builder create() {
14256         return new Builder();
14257       }
14258 
14259       public Builder clear() {
14260         super.clear();
14261         timestamps_ = java.util.Collections.emptyList();
14262         bitField0_ = (bitField0_ & ~0x00000001);
14263         return this;
14264       }
14265 
14266       public Builder clone() {
14267         return create().mergeFrom(buildPartial());
14268       }
14269 
14270       public com.google.protobuf.Descriptors.Descriptor
14271           getDescriptorForType() {
14272         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_TimestampsFilter_descriptor;
14273       }
14274 
14275       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter getDefaultInstanceForType() {
14276         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance();
14277       }
14278 
14279       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter build() {
14280         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = buildPartial();
14281         if (!result.isInitialized()) {
14282           throw newUninitializedMessageException(result);
14283         }
14284         return result;
14285       }
14286 
14287       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter buildPartial() {
14288         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter(this);
14289         int from_bitField0_ = bitField0_;
14290         if (((bitField0_ & 0x00000001) == 0x00000001)) {
14291           timestamps_ = java.util.Collections.unmodifiableList(timestamps_);
14292           bitField0_ = (bitField0_ & ~0x00000001);
14293         }
14294         result.timestamps_ = timestamps_;
14295         onBuilt();
14296         return result;
14297       }
14298 
14299       public Builder mergeFrom(com.google.protobuf.Message other) {
14300         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) {
14301           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter)other);
14302         } else {
14303           super.mergeFrom(other);
14304           return this;
14305         }
14306       }
14307 
14308       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter other) {
14309         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter.getDefaultInstance()) return this;
14310         if (!other.timestamps_.isEmpty()) {
14311           if (timestamps_.isEmpty()) {
14312             timestamps_ = other.timestamps_;
14313             bitField0_ = (bitField0_ & ~0x00000001);
14314           } else {
14315             ensureTimestampsIsMutable();
14316             timestamps_.addAll(other.timestamps_);
14317           }
14318           onChanged();
14319         }
14320         this.mergeUnknownFields(other.getUnknownFields());
14321         return this;
14322       }
14323 
14324       public final boolean isInitialized() {
14325         return true;
14326       }
14327 
14328       public Builder mergeFrom(
14329           com.google.protobuf.CodedInputStream input,
14330           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14331           throws java.io.IOException {
14332         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter parsedMessage = null;
14333         try {
14334           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
14335         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14336           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.TimestampsFilter) e.getUnfinishedMessage();
14337           throw e;
14338         } finally {
14339           if (parsedMessage != null) {
14340             mergeFrom(parsedMessage);
14341           }
14342         }
14343         return this;
14344       }
14345       private int bitField0_;
14346 
14347       // repeated int64 timestamps = 1 [packed = true];
14348       private java.util.List<java.lang.Long> timestamps_ = java.util.Collections.emptyList();
14349       private void ensureTimestampsIsMutable() {
14350         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
14351           timestamps_ = new java.util.ArrayList<java.lang.Long>(timestamps_);
14352           bitField0_ |= 0x00000001;
14353          }
14354       }
14355       /**
14356        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14357        */
14358       public java.util.List<java.lang.Long>
14359           getTimestampsList() {
14360         return java.util.Collections.unmodifiableList(timestamps_);
14361       }
14362       /**
14363        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14364        */
14365       public int getTimestampsCount() {
14366         return timestamps_.size();
14367       }
14368       /**
14369        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14370        */
14371       public long getTimestamps(int index) {
14372         return timestamps_.get(index);
14373       }
14374       /**
14375        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14376        */
14377       public Builder setTimestamps(
14378           int index, long value) {
14379         ensureTimestampsIsMutable();
14380         timestamps_.set(index, value);
14381         onChanged();
14382         return this;
14383       }
14384       /**
14385        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14386        */
14387       public Builder addTimestamps(long value) {
14388         ensureTimestampsIsMutable();
14389         timestamps_.add(value);
14390         onChanged();
14391         return this;
14392       }
14393       /**
14394        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14395        */
14396       public Builder addAllTimestamps(
14397           java.lang.Iterable<? extends java.lang.Long> values) {
14398         ensureTimestampsIsMutable();
14399         super.addAll(values, timestamps_);
14400         onChanged();
14401         return this;
14402       }
14403       /**
14404        * <code>repeated int64 timestamps = 1 [packed = true];</code>
14405        */
14406       public Builder clearTimestamps() {
14407         timestamps_ = java.util.Collections.emptyList();
14408         bitField0_ = (bitField0_ & ~0x00000001);
14409         onChanged();
14410         return this;
14411       }
14412 
14413       // @@protoc_insertion_point(builder_scope:TimestampsFilter)
14414     }
14415 
14416     static {
14417       defaultInstance = new TimestampsFilter(true);
14418       defaultInstance.initFields();
14419     }
14420 
14421     // @@protoc_insertion_point(class_scope:TimestampsFilter)
14422   }
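  // Editor's note (not produced by protoc): a minimal, hedged usage sketch of the
  // generated TimestampsFilter API above, shown as a comment so the generated file
  // stays untouched as code. It builds a message, serializes it, and parses it back;
  // the timestamp values are illustrative only.
  //
  //   FilterProtos.TimestampsFilter tsFilter =
  //       FilterProtos.TimestampsFilter.newBuilder()
  //           .addTimestamps(1L)          // packed repeated int64 field
  //           .addTimestamps(2L)
  //           .build();
  //   byte[] serialized = tsFilter.toByteArray();
  //   FilterProtos.TimestampsFilter roundTripped =
  //       FilterProtos.TimestampsFilter.parseFrom(serialized);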
14423 
14424   public interface ValueFilterOrBuilder
14425       extends com.google.protobuf.MessageOrBuilder {
14426 
14427     // required .CompareFilter compare_filter = 1;
14428     /**
14429      * <code>required .CompareFilter compare_filter = 1;</code>
14430      */
14431     boolean hasCompareFilter();
14432     /**
14433      * <code>required .CompareFilter compare_filter = 1;</code>
14434      */
14435     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter();
14436     /**
14437      * <code>required .CompareFilter compare_filter = 1;</code>
14438      */
14439     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder();
14440   }
14441   /**
14442    * Protobuf type {@code ValueFilter}
14443    */
14444   public static final class ValueFilter extends
14445       com.google.protobuf.GeneratedMessage
14446       implements ValueFilterOrBuilder {
14447     // Use ValueFilter.newBuilder() to construct.
14448     private ValueFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
14449       super(builder);
14450       this.unknownFields = builder.getUnknownFields();
14451     }
14452     private ValueFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
14453 
14454     private static final ValueFilter defaultInstance;
14455     public static ValueFilter getDefaultInstance() {
14456       return defaultInstance;
14457     }
14458 
14459     public ValueFilter getDefaultInstanceForType() {
14460       return defaultInstance;
14461     }
14462 
14463     private final com.google.protobuf.UnknownFieldSet unknownFields;
14464     @java.lang.Override
14465     public final com.google.protobuf.UnknownFieldSet
14466         getUnknownFields() {
14467       return this.unknownFields;
14468     }
14469     private ValueFilter(
14470         com.google.protobuf.CodedInputStream input,
14471         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14472         throws com.google.protobuf.InvalidProtocolBufferException {
14473       initFields();
14474       int mutable_bitField0_ = 0;
14475       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
14476           com.google.protobuf.UnknownFieldSet.newBuilder();
14477       try {
14478         boolean done = false;
14479         while (!done) {
14480           int tag = input.readTag();
14481           switch (tag) {
14482             case 0:
14483               done = true;
14484               break;
14485             default: {
14486               if (!parseUnknownField(input, unknownFields,
14487                                      extensionRegistry, tag)) {
14488                 done = true;
14489               }
14490               break;
14491             }
14492             case 10: {
14493               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null;
14494               if (((bitField0_ & 0x00000001) == 0x00000001)) {
14495                 subBuilder = compareFilter_.toBuilder();
14496               }
14497               compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry);
14498               if (subBuilder != null) {
14499                 subBuilder.mergeFrom(compareFilter_);
14500                 compareFilter_ = subBuilder.buildPartial();
14501               }
14502               bitField0_ |= 0x00000001;
14503               break;
14504             }
14505           }
14506         }
14507       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14508         throw e.setUnfinishedMessage(this);
14509       } catch (java.io.IOException e) {
14510         throw new com.google.protobuf.InvalidProtocolBufferException(
14511             e.getMessage()).setUnfinishedMessage(this);
14512       } finally {
14513         this.unknownFields = unknownFields.build();
14514         makeExtensionsImmutable();
14515       }
14516     }
14517     public static final com.google.protobuf.Descriptors.Descriptor
14518         getDescriptor() {
14519       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor;
14520     }
14521 
14522     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14523         internalGetFieldAccessorTable() {
14524       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable
14525           .ensureFieldAccessorsInitialized(
14526               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class);
14527     }
14528 
14529     public static com.google.protobuf.Parser<ValueFilter> PARSER =
14530         new com.google.protobuf.AbstractParser<ValueFilter>() {
14531       public ValueFilter parsePartialFrom(
14532           com.google.protobuf.CodedInputStream input,
14533           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14534           throws com.google.protobuf.InvalidProtocolBufferException {
14535         return new ValueFilter(input, extensionRegistry);
14536       }
14537     };
14538 
14539     @java.lang.Override
14540     public com.google.protobuf.Parser<ValueFilter> getParserForType() {
14541       return PARSER;
14542     }
14543 
14544     private int bitField0_;
14545     // required .CompareFilter compare_filter = 1;
14546     public static final int COMPARE_FILTER_FIELD_NUMBER = 1;
14547     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_;
14548     /**
14549      * <code>required .CompareFilter compare_filter = 1;</code>
14550      */
14551     public boolean hasCompareFilter() {
14552       return ((bitField0_ & 0x00000001) == 0x00000001);
14553     }
14554     /**
14555      * <code>required .CompareFilter compare_filter = 1;</code>
14556      */
14557     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
14558       return compareFilter_;
14559     }
14560     /**
14561      * <code>required .CompareFilter compare_filter = 1;</code>
14562      */
14563     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
14564       return compareFilter_;
14565     }
14566 
14567     private void initFields() {
14568       compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
14569     }
14570     private byte memoizedIsInitialized = -1;
14571     public final boolean isInitialized() {
14572       byte isInitialized = memoizedIsInitialized;
14573       if (isInitialized != -1) return isInitialized == 1;
14574 
14575       if (!hasCompareFilter()) {
14576         memoizedIsInitialized = 0;
14577         return false;
14578       }
14579       if (!getCompareFilter().isInitialized()) {
14580         memoizedIsInitialized = 0;
14581         return false;
14582       }
14583       memoizedIsInitialized = 1;
14584       return true;
14585     }
14586 
14587     public void writeTo(com.google.protobuf.CodedOutputStream output)
14588                         throws java.io.IOException {
14589       getSerializedSize();
14590       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14591         output.writeMessage(1, compareFilter_);
14592       }
14593       getUnknownFields().writeTo(output);
14594     }
14595 
14596     private int memoizedSerializedSize = -1;
14597     public int getSerializedSize() {
14598       int size = memoizedSerializedSize;
14599       if (size != -1) return size;
14600 
14601       size = 0;
14602       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14603         size += com.google.protobuf.CodedOutputStream
14604           .computeMessageSize(1, compareFilter_);
14605       }
14606       size += getUnknownFields().getSerializedSize();
14607       memoizedSerializedSize = size;
14608       return size;
14609     }
14610 
14611     private static final long serialVersionUID = 0L;
14612     @java.lang.Override
14613     protected java.lang.Object writeReplace()
14614         throws java.io.ObjectStreamException {
14615       return super.writeReplace();
14616     }
14617 
14618     @java.lang.Override
14619     public boolean equals(final java.lang.Object obj) {
14620       if (obj == this) {
14621        return true;
14622       }
14623       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)) {
14624         return super.equals(obj);
14625       }
14626       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) obj;
14627 
14628       boolean result = true;
14629       result = result && (hasCompareFilter() == other.hasCompareFilter());
14630       if (hasCompareFilter()) {
14631         result = result && getCompareFilter()
14632             .equals(other.getCompareFilter());
14633       }
14634       result = result &&
14635           getUnknownFields().equals(other.getUnknownFields());
14636       return result;
14637     }
14638 
14639     private int memoizedHashCode = 0;
14640     @java.lang.Override
14641     public int hashCode() {
14642       if (memoizedHashCode != 0) {
14643         return memoizedHashCode;
14644       }
14645       int hash = 41;
14646       hash = (19 * hash) + getDescriptorForType().hashCode();
14647       if (hasCompareFilter()) {
14648         hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER;
14649         hash = (53 * hash) + getCompareFilter().hashCode();
14650       }
14651       hash = (29 * hash) + getUnknownFields().hashCode();
14652       memoizedHashCode = hash;
14653       return hash;
14654     }
14655 
14656     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
14657         com.google.protobuf.ByteString data)
14658         throws com.google.protobuf.InvalidProtocolBufferException {
14659       return PARSER.parseFrom(data);
14660     }
14661     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
14662         com.google.protobuf.ByteString data,
14663         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14664         throws com.google.protobuf.InvalidProtocolBufferException {
14665       return PARSER.parseFrom(data, extensionRegistry);
14666     }
14667     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(byte[] data)
14668         throws com.google.protobuf.InvalidProtocolBufferException {
14669       return PARSER.parseFrom(data);
14670     }
14671     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
14672         byte[] data,
14673         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14674         throws com.google.protobuf.InvalidProtocolBufferException {
14675       return PARSER.parseFrom(data, extensionRegistry);
14676     }
14677     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(java.io.InputStream input)
14678         throws java.io.IOException {
14679       return PARSER.parseFrom(input);
14680     }
14681     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
14682         java.io.InputStream input,
14683         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14684         throws java.io.IOException {
14685       return PARSER.parseFrom(input, extensionRegistry);
14686     }
14687     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(java.io.InputStream input)
14688         throws java.io.IOException {
14689       return PARSER.parseDelimitedFrom(input);
14690     }
14691     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseDelimitedFrom(
14692         java.io.InputStream input,
14693         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14694         throws java.io.IOException {
14695       return PARSER.parseDelimitedFrom(input, extensionRegistry);
14696     }
14697     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
14698         com.google.protobuf.CodedInputStream input)
14699         throws java.io.IOException {
14700       return PARSER.parseFrom(input);
14701     }
14702     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parseFrom(
14703         com.google.protobuf.CodedInputStream input,
14704         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14705         throws java.io.IOException {
14706       return PARSER.parseFrom(input, extensionRegistry);
14707     }
14708 
14709     public static Builder newBuilder() { return Builder.create(); }
14710     public Builder newBuilderForType() { return newBuilder(); }
14711     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter prototype) {
14712       return newBuilder().mergeFrom(prototype);
14713     }
14714     public Builder toBuilder() { return newBuilder(this); }
14715 
14716     @java.lang.Override
14717     protected Builder newBuilderForType(
14718         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14719       Builder builder = new Builder(parent);
14720       return builder;
14721     }
14722     /**
14723      * Protobuf type {@code ValueFilter}
14724      */
14725     public static final class Builder extends
14726         com.google.protobuf.GeneratedMessage.Builder<Builder>
14727        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilterOrBuilder {
14728       public static final com.google.protobuf.Descriptors.Descriptor
14729           getDescriptor() {
14730         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor;
14731       }
14732 
14733       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
14734           internalGetFieldAccessorTable() {
14735         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_fieldAccessorTable
14736             .ensureFieldAccessorsInitialized(
14737                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.Builder.class);
14738       }
14739 
14740       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.newBuilder()
14741       private Builder() {
14742         maybeForceBuilderInitialization();
14743       }
14744 
14745       private Builder(
14746           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
14747         super(parent);
14748         maybeForceBuilderInitialization();
14749       }
14750       private void maybeForceBuilderInitialization() {
14751         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
14752           getCompareFilterFieldBuilder();
14753         }
14754       }
14755       private static Builder create() {
14756         return new Builder();
14757       }
14758 
14759       public Builder clear() {
14760         super.clear();
14761         if (compareFilterBuilder_ == null) {
14762           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
14763         } else {
14764           compareFilterBuilder_.clear();
14765         }
14766         bitField0_ = (bitField0_ & ~0x00000001);
14767         return this;
14768       }
14769 
14770       public Builder clone() {
14771         return create().mergeFrom(buildPartial());
14772       }
14773 
14774       public com.google.protobuf.Descriptors.Descriptor
14775           getDescriptorForType() {
14776         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ValueFilter_descriptor;
14777       }
14778 
14779       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter getDefaultInstanceForType() {
14780         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance();
14781       }
14782 
14783       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter build() {
14784         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = buildPartial();
14785         if (!result.isInitialized()) {
14786           throw newUninitializedMessageException(result);
14787         }
14788         return result;
14789       }
14790 
14791       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter buildPartial() {
14792         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter(this);
14793         int from_bitField0_ = bitField0_;
14794         int to_bitField0_ = 0;
14795         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
14796           to_bitField0_ |= 0x00000001;
14797         }
14798         if (compareFilterBuilder_ == null) {
14799           result.compareFilter_ = compareFilter_;
14800         } else {
14801           result.compareFilter_ = compareFilterBuilder_.build();
14802         }
14803         result.bitField0_ = to_bitField0_;
14804         onBuilt();
14805         return result;
14806       }
14807 
14808       public Builder mergeFrom(com.google.protobuf.Message other) {
14809         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) {
14810           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter)other);
14811         } else {
14812           super.mergeFrom(other);
14813           return this;
14814         }
14815       }
14816 
14817       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter other) {
14818         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter.getDefaultInstance()) return this;
14819         if (other.hasCompareFilter()) {
14820           mergeCompareFilter(other.getCompareFilter());
14821         }
14822         this.mergeUnknownFields(other.getUnknownFields());
14823         return this;
14824       }
14825 
14826       public final boolean isInitialized() {
14827         if (!hasCompareFilter()) {
14828 
14829           return false;
14830         }
14831         if (!getCompareFilter().isInitialized()) {
14832 
14833           return false;
14834         }
14835         return true;
14836       }
14837 
14838       public Builder mergeFrom(
14839           com.google.protobuf.CodedInputStream input,
14840           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14841           throws java.io.IOException {
14842         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter parsedMessage = null;
14843         try {
14844           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
14845         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14846           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ValueFilter) e.getUnfinishedMessage();
14847           throw e;
14848         } finally {
14849           if (parsedMessage != null) {
14850             mergeFrom(parsedMessage);
14851           }
14852         }
14853         return this;
14854       }
14855       private int bitField0_;
14856 
14857       // required .CompareFilter compare_filter = 1;
14858       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
14859       private com.google.protobuf.SingleFieldBuilder<
14860           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_;
14861       /**
14862        * <code>required .CompareFilter compare_filter = 1;</code>
14863        */
14864       public boolean hasCompareFilter() {
14865         return ((bitField0_ & 0x00000001) == 0x00000001);
14866       }
14867       /**
14868        * <code>required .CompareFilter compare_filter = 1;</code>
14869        */
14870       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() {
14871         if (compareFilterBuilder_ == null) {
14872           return compareFilter_;
14873         } else {
14874           return compareFilterBuilder_.getMessage();
14875         }
14876       }
14877       /**
14878        * <code>required .CompareFilter compare_filter = 1;</code>
14879        */
14880       public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
14881         if (compareFilterBuilder_ == null) {
14882           if (value == null) {
14883             throw new NullPointerException();
14884           }
14885           compareFilter_ = value;
14886           onChanged();
14887         } else {
14888           compareFilterBuilder_.setMessage(value);
14889         }
14890         bitField0_ |= 0x00000001;
14891         return this;
14892       }
14893       /**
14894        * <code>required .CompareFilter compare_filter = 1;</code>
14895        */
14896       public Builder setCompareFilter(
14897           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) {
14898         if (compareFilterBuilder_ == null) {
14899           compareFilter_ = builderForValue.build();
14900           onChanged();
14901         } else {
14902           compareFilterBuilder_.setMessage(builderForValue.build());
14903         }
14904         bitField0_ |= 0x00000001;
14905         return this;
14906       }
14907       /**
14908        * <code>required .CompareFilter compare_filter = 1;</code>
14909        */
14910       public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) {
14911         if (compareFilterBuilder_ == null) {
14912           if (((bitField0_ & 0x00000001) == 0x00000001) &&
14913               compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) {
14914             compareFilter_ =
14915               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial();
14916           } else {
14917             compareFilter_ = value;
14918           }
14919           onChanged();
14920         } else {
14921           compareFilterBuilder_.mergeFrom(value);
14922         }
14923         bitField0_ |= 0x00000001;
14924         return this;
14925       }
14926       /**
14927        * <code>required .CompareFilter compare_filter = 1;</code>
14928        */
14929       public Builder clearCompareFilter() {
14930         if (compareFilterBuilder_ == null) {
14931           compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance();
14932           onChanged();
14933         } else {
14934           compareFilterBuilder_.clear();
14935         }
14936         bitField0_ = (bitField0_ & ~0x00000001);
14937         return this;
14938       }
14939       /**
14940        * <code>required .CompareFilter compare_filter = 1;</code>
14941        */
14942       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() {
14943         bitField0_ |= 0x00000001;
14944         onChanged();
14945         return getCompareFilterFieldBuilder().getBuilder();
14946       }
14947       /**
14948        * <code>required .CompareFilter compare_filter = 1;</code>
14949        */
14950       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() {
14951         if (compareFilterBuilder_ != null) {
14952           return compareFilterBuilder_.getMessageOrBuilder();
14953         } else {
14954           return compareFilter_;
14955         }
14956       }
14957       /**
14958        * <code>required .CompareFilter compare_filter = 1;</code>
14959        */
14960       private com.google.protobuf.SingleFieldBuilder<
14961           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>
14962           getCompareFilterFieldBuilder() {
14963         if (compareFilterBuilder_ == null) {
14964           compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
14965               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>(
14966                   compareFilter_,
14967                   getParentForChildren(),
14968                   isClean());
14969           compareFilter_ = null;
14970         }
14971         return compareFilterBuilder_;
14972       }
14973 
14974       // @@protoc_insertion_point(builder_scope:ValueFilter)
14975     }
14976 
14977     static {
14978       defaultInstance = new ValueFilter(true);
14979       defaultInstance.initFields();
14980     }
14981 
14982     // @@protoc_insertion_point(class_scope:ValueFilter)
14983   }
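  // Editor's note (not produced by protoc): ValueFilter carries a single required
  // CompareFilter sub-message. The hedged sketch below shows populating it through
  // the nested builder; the CompareFilter contents are deliberately elided because
  // its required fields (such as the compare op) are defined elsewhere in this file,
  // and build() would throw an uninitialized-message exception until they are set.
  //
  //   FilterProtos.CompareFilter.Builder compareFilter =
  //       FilterProtos.CompareFilter.newBuilder();
  //   // ... set the required compare op on compareFilter here ...
  //   FilterProtos.ValueFilter valueFilter =
  //       FilterProtos.ValueFilter.newBuilder()
  //           .setCompareFilter(compareFilter)
  //           .build();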
14984 
14985   public interface WhileMatchFilterOrBuilder
14986       extends com.google.protobuf.MessageOrBuilder {
14987 
14988     // required .Filter filter = 1;
14989     /**
14990      * <code>required .Filter filter = 1;</code>
14991      */
14992     boolean hasFilter();
14993     /**
14994      * <code>required .Filter filter = 1;</code>
14995      */
14996     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
14997     /**
14998      * <code>required .Filter filter = 1;</code>
14999      */
15000     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
15001   }
15002   /**
15003    * Protobuf type {@code WhileMatchFilter}
15004    */
15005   public static final class WhileMatchFilter extends
15006       com.google.protobuf.GeneratedMessage
15007       implements WhileMatchFilterOrBuilder {
15008     // Use WhileMatchFilter.newBuilder() to construct.
15009     private WhileMatchFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
15010       super(builder);
15011       this.unknownFields = builder.getUnknownFields();
15012     }
15013     private WhileMatchFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15014 
15015     private static final WhileMatchFilter defaultInstance;
15016     public static WhileMatchFilter getDefaultInstance() {
15017       return defaultInstance;
15018     }
15019 
15020     public WhileMatchFilter getDefaultInstanceForType() {
15021       return defaultInstance;
15022     }
15023 
15024     private final com.google.protobuf.UnknownFieldSet unknownFields;
15025     @java.lang.Override
15026     public final com.google.protobuf.UnknownFieldSet
15027         getUnknownFields() {
15028       return this.unknownFields;
15029     }
15030     private WhileMatchFilter(
15031         com.google.protobuf.CodedInputStream input,
15032         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15033         throws com.google.protobuf.InvalidProtocolBufferException {
15034       initFields();
15035       int mutable_bitField0_ = 0;
15036       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15037           com.google.protobuf.UnknownFieldSet.newBuilder();
15038       try {
15039         boolean done = false;
15040         while (!done) {
15041           int tag = input.readTag();
15042           switch (tag) {
15043             case 0:
15044               done = true;
15045               break;
15046             default: {
15047               if (!parseUnknownField(input, unknownFields,
15048                                      extensionRegistry, tag)) {
15049                 done = true;
15050               }
15051               break;
15052             }
15053             case 10: {
15054               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
15055               if (((bitField0_ & 0x00000001) == 0x00000001)) {
15056                 subBuilder = filter_.toBuilder();
15057               }
15058               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
15059               if (subBuilder != null) {
15060                 subBuilder.mergeFrom(filter_);
15061                 filter_ = subBuilder.buildPartial();
15062               }
15063               bitField0_ |= 0x00000001;
15064               break;
15065             }
15066           }
15067         }
15068       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15069         throw e.setUnfinishedMessage(this);
15070       } catch (java.io.IOException e) {
15071         throw new com.google.protobuf.InvalidProtocolBufferException(
15072             e.getMessage()).setUnfinishedMessage(this);
15073       } finally {
15074         this.unknownFields = unknownFields.build();
15075         makeExtensionsImmutable();
15076       }
15077     }
15078     public static final com.google.protobuf.Descriptors.Descriptor
15079         getDescriptor() {
15080       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor;
15081     }
15082 
15083     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15084         internalGetFieldAccessorTable() {
15085       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable
15086           .ensureFieldAccessorsInitialized(
15087               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class);
15088     }
15089 
15090     public static com.google.protobuf.Parser<WhileMatchFilter> PARSER =
15091         new com.google.protobuf.AbstractParser<WhileMatchFilter>() {
15092       public WhileMatchFilter parsePartialFrom(
15093           com.google.protobuf.CodedInputStream input,
15094           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15095           throws com.google.protobuf.InvalidProtocolBufferException {
15096         return new WhileMatchFilter(input, extensionRegistry);
15097       }
15098     };
15099 
15100     @java.lang.Override
15101     public com.google.protobuf.Parser<WhileMatchFilter> getParserForType() {
15102       return PARSER;
15103     }
15104 
15105     private int bitField0_;
15106     // required .Filter filter = 1;
15107     public static final int FILTER_FIELD_NUMBER = 1;
15108     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
15109     /**
15110      * <code>required .Filter filter = 1;</code>
15111      */
15112     public boolean hasFilter() {
15113       return ((bitField0_ & 0x00000001) == 0x00000001);
15114     }
15115     /**
15116      * <code>required .Filter filter = 1;</code>
15117      */
15118     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
15119       return filter_;
15120     }
15121     /**
15122      * <code>required .Filter filter = 1;</code>
15123      */
15124     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
15125       return filter_;
15126     }
15127 
15128     private void initFields() {
15129       filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15130     }
15131     private byte memoizedIsInitialized = -1;
15132     public final boolean isInitialized() {
15133       byte isInitialized = memoizedIsInitialized;
15134       if (isInitialized != -1) return isInitialized == 1;
15135 
15136       if (!hasFilter()) {
15137         memoizedIsInitialized = 0;
15138         return false;
15139       }
15140       if (!getFilter().isInitialized()) {
15141         memoizedIsInitialized = 0;
15142         return false;
15143       }
15144       memoizedIsInitialized = 1;
15145       return true;
15146     }
15147 
15148     public void writeTo(com.google.protobuf.CodedOutputStream output)
15149                         throws java.io.IOException {
15150       getSerializedSize();
15151       if (((bitField0_ & 0x00000001) == 0x00000001)) {
15152         output.writeMessage(1, filter_);
15153       }
15154       getUnknownFields().writeTo(output);
15155     }
15156 
15157     private int memoizedSerializedSize = -1;
15158     public int getSerializedSize() {
15159       int size = memoizedSerializedSize;
15160       if (size != -1) return size;
15161 
15162       size = 0;
15163       if (((bitField0_ & 0x00000001) == 0x00000001)) {
15164         size += com.google.protobuf.CodedOutputStream
15165           .computeMessageSize(1, filter_);
15166       }
15167       size += getUnknownFields().getSerializedSize();
15168       memoizedSerializedSize = size;
15169       return size;
15170     }
15171 
15172     private static final long serialVersionUID = 0L;
15173     @java.lang.Override
15174     protected java.lang.Object writeReplace()
15175         throws java.io.ObjectStreamException {
15176       return super.writeReplace();
15177     }
15178 
15179     @java.lang.Override
15180     public boolean equals(final java.lang.Object obj) {
15181       if (obj == this) {
15182        return true;
15183       }
15184       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)) {
15185         return super.equals(obj);
15186       }
15187       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) obj;
15188 
15189       boolean result = true;
15190       result = result && (hasFilter() == other.hasFilter());
15191       if (hasFilter()) {
15192         result = result && getFilter()
15193             .equals(other.getFilter());
15194       }
15195       result = result &&
15196           getUnknownFields().equals(other.getUnknownFields());
15197       return result;
15198     }
15199 
15200     private int memoizedHashCode = 0;
15201     @java.lang.Override
15202     public int hashCode() {
15203       if (memoizedHashCode != 0) {
15204         return memoizedHashCode;
15205       }
15206       int hash = 41;
15207       hash = (19 * hash) + getDescriptorForType().hashCode();
15208       if (hasFilter()) {
15209         hash = (37 * hash) + FILTER_FIELD_NUMBER;
15210         hash = (53 * hash) + getFilter().hashCode();
15211       }
15212       hash = (29 * hash) + getUnknownFields().hashCode();
15213       memoizedHashCode = hash;
15214       return hash;
15215     }
15216 
15217     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
15218         com.google.protobuf.ByteString data)
15219         throws com.google.protobuf.InvalidProtocolBufferException {
15220       return PARSER.parseFrom(data);
15221     }
15222     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
15223         com.google.protobuf.ByteString data,
15224         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15225         throws com.google.protobuf.InvalidProtocolBufferException {
15226       return PARSER.parseFrom(data, extensionRegistry);
15227     }
15228     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(byte[] data)
15229         throws com.google.protobuf.InvalidProtocolBufferException {
15230       return PARSER.parseFrom(data);
15231     }
15232     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
15233         byte[] data,
15234         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15235         throws com.google.protobuf.InvalidProtocolBufferException {
15236       return PARSER.parseFrom(data, extensionRegistry);
15237     }
15238     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(java.io.InputStream input)
15239         throws java.io.IOException {
15240       return PARSER.parseFrom(input);
15241     }
15242     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
15243         java.io.InputStream input,
15244         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15245         throws java.io.IOException {
15246       return PARSER.parseFrom(input, extensionRegistry);
15247     }
15248     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(java.io.InputStream input)
15249         throws java.io.IOException {
15250       return PARSER.parseDelimitedFrom(input);
15251     }
15252     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseDelimitedFrom(
15253         java.io.InputStream input,
15254         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15255         throws java.io.IOException {
15256       return PARSER.parseDelimitedFrom(input, extensionRegistry);
15257     }
15258     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
15259         com.google.protobuf.CodedInputStream input)
15260         throws java.io.IOException {
15261       return PARSER.parseFrom(input);
15262     }
15263     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parseFrom(
15264         com.google.protobuf.CodedInputStream input,
15265         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15266         throws java.io.IOException {
15267       return PARSER.parseFrom(input, extensionRegistry);
15268     }
15269 
15270     public static Builder newBuilder() { return Builder.create(); }
15271     public Builder newBuilderForType() { return newBuilder(); }
15272     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter prototype) {
15273       return newBuilder().mergeFrom(prototype);
15274     }
15275     public Builder toBuilder() { return newBuilder(this); }
15276 
15277     @java.lang.Override
15278     protected Builder newBuilderForType(
15279         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15280       Builder builder = new Builder(parent);
15281       return builder;
15282     }
15283     /**
15284      * Protobuf type {@code WhileMatchFilter}
15285      */
15286     public static final class Builder extends
15287         com.google.protobuf.GeneratedMessage.Builder<Builder>
15288        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilterOrBuilder {
15289       public static final com.google.protobuf.Descriptors.Descriptor
15290           getDescriptor() {
15291         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor;
15292       }
15293 
15294       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15295           internalGetFieldAccessorTable() {
15296         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_fieldAccessorTable
15297             .ensureFieldAccessorsInitialized(
15298                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.Builder.class);
15299       }
15300 
15301       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.newBuilder()
15302       private Builder() {
15303         maybeForceBuilderInitialization();
15304       }
15305 
15306       private Builder(
15307           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15308         super(parent);
15309         maybeForceBuilderInitialization();
15310       }
15311       private void maybeForceBuilderInitialization() {
15312         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15313           getFilterFieldBuilder();
15314         }
15315       }
15316       private static Builder create() {
15317         return new Builder();
15318       }
15319 
15320       public Builder clear() {
15321         super.clear();
15322         if (filterBuilder_ == null) {
15323           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15324         } else {
15325           filterBuilder_.clear();
15326         }
15327         bitField0_ = (bitField0_ & ~0x00000001);
15328         return this;
15329       }
15330 
15331       public Builder clone() {
15332         return create().mergeFrom(buildPartial());
15333       }
15334 
15335       public com.google.protobuf.Descriptors.Descriptor
15336           getDescriptorForType() {
15337         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_WhileMatchFilter_descriptor;
15338       }
15339 
15340       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter getDefaultInstanceForType() {
15341         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance();
15342       }
15343 
15344       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter build() {
15345         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = buildPartial();
15346         if (!result.isInitialized()) {
15347           throw newUninitializedMessageException(result);
15348         }
15349         return result;
15350       }
15351 
15352       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter buildPartial() {
15353         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter(this);
15354         int from_bitField0_ = bitField0_;
15355         int to_bitField0_ = 0;
15356         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
15357           to_bitField0_ |= 0x00000001;
15358         }
15359         if (filterBuilder_ == null) {
15360           result.filter_ = filter_;
15361         } else {
15362           result.filter_ = filterBuilder_.build();
15363         }
15364         result.bitField0_ = to_bitField0_;
15365         onBuilt();
15366         return result;
15367       }
15368 
mergeFrom(com.google.protobuf.Message other)15369       public Builder mergeFrom(com.google.protobuf.Message other) {
15370         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) {
15371           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter)other);
15372         } else {
15373           super.mergeFrom(other);
15374           return this;
15375         }
15376       }
15377 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other)15378       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter other) {
15379         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter.getDefaultInstance()) return this;
15380         if (other.hasFilter()) {
15381           mergeFilter(other.getFilter());
15382         }
15383         this.mergeUnknownFields(other.getUnknownFields());
15384         return this;
15385       }
15386 
isInitialized()15387       public final boolean isInitialized() {
15388         if (!hasFilter()) {
15389 
15390           return false;
15391         }
15392         if (!getFilter().isInitialized()) {
15393 
15394           return false;
15395         }
15396         return true;
15397       }
15398 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15399       public Builder mergeFrom(
15400           com.google.protobuf.CodedInputStream input,
15401           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15402           throws java.io.IOException {
15403         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter parsedMessage = null;
15404         try {
15405           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15406         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15407           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.WhileMatchFilter) e.getUnfinishedMessage();
15408           throw e;
15409         } finally {
15410           if (parsedMessage != null) {
15411             mergeFrom(parsedMessage);
15412           }
15413         }
15414         return this;
15415       }
15416       private int bitField0_;
15417 
15418       // required .Filter filter = 1;
15419       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15420       private com.google.protobuf.SingleFieldBuilder<
15421           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
15422       /**
15423        * <code>required .Filter filter = 1;</code>
15424        */
hasFilter()15425       public boolean hasFilter() {
15426         return ((bitField0_ & 0x00000001) == 0x00000001);
15427       }
15428       /**
15429        * <code>required .Filter filter = 1;</code>
15430        */
getFilter()15431       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
15432         if (filterBuilder_ == null) {
15433           return filter_;
15434         } else {
15435           return filterBuilder_.getMessage();
15436         }
15437       }
15438       /**
15439        * <code>required .Filter filter = 1;</code>
15440        */
setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)15441       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
15442         if (filterBuilder_ == null) {
15443           if (value == null) {
15444             throw new NullPointerException();
15445           }
15446           filter_ = value;
15447           onChanged();
15448         } else {
15449           filterBuilder_.setMessage(value);
15450         }
15451         bitField0_ |= 0x00000001;
15452         return this;
15453       }
15454       /**
15455        * <code>required .Filter filter = 1;</code>
15456        */
setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)15457       public Builder setFilter(
15458           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
15459         if (filterBuilder_ == null) {
15460           filter_ = builderForValue.build();
15461           onChanged();
15462         } else {
15463           filterBuilder_.setMessage(builderForValue.build());
15464         }
15465         bitField0_ |= 0x00000001;
15466         return this;
15467       }
15468       /**
15469        * <code>required .Filter filter = 1;</code>
15470        */
      public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
        if (filterBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
            filter_ =
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
          } else {
            filter_ = value;
          }
          onChanged();
        } else {
          filterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
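      // Editor's note (not protoc output): mergeFilter() merges the incoming
      // sub-message field-by-field instead of replacing the one already set.
      // A minimal sketch of the difference, assuming two hypothetical Filter
      // values f1 and f2:
      //
      //   builder.setFilter(f1).mergeFilter(f2);  // fields set in f2 override f1;
      //                                           // fields unset in f2 keep f1's values
      //   builder.setFilter(f2);                  // discards f1 entirely
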
15487       /**
15488        * <code>required .Filter filter = 1;</code>
15489        */
clearFilter()15490       public Builder clearFilter() {
15491         if (filterBuilder_ == null) {
15492           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15493           onChanged();
15494         } else {
15495           filterBuilder_.clear();
15496         }
15497         bitField0_ = (bitField0_ & ~0x00000001);
15498         return this;
15499       }
15500       /**
15501        * <code>required .Filter filter = 1;</code>
15502        */
getFilterBuilder()15503       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
15504         bitField0_ |= 0x00000001;
15505         onChanged();
15506         return getFilterFieldBuilder().getBuilder();
15507       }
15508       /**
15509        * <code>required .Filter filter = 1;</code>
15510        */
getFilterOrBuilder()15511       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
15512         if (filterBuilder_ != null) {
15513           return filterBuilder_.getMessageOrBuilder();
15514         } else {
15515           return filter_;
15516         }
15517       }
15518       /**
15519        * <code>required .Filter filter = 1;</code>
15520        */
15521       private com.google.protobuf.SingleFieldBuilder<
15522           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
getFilterFieldBuilder()15523           getFilterFieldBuilder() {
15524         if (filterBuilder_ == null) {
15525           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
15526               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
15527                   filter_,
15528                   getParentForChildren(),
15529                   isClean());
15530           filter_ = null;
15531         }
15532         return filterBuilder_;
15533       }
15534 
15535       // @@protoc_insertion_point(builder_scope:WhileMatchFilter)
15536     }
15537 
    static {
      defaultInstance = new WhileMatchFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:WhileMatchFilter)
  }
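
  // Editor's note (not protoc output): a minimal usage sketch for the generated
  // WhileMatchFilter message. The filter class name and payload below are
  // illustrative placeholders, not values defined in this file.
  //
  //   FilterProtos.Filter inner = FilterProtos.Filter.newBuilder()
  //       .setName("org.apache.hadoop.hbase.filter.PrefixFilter")      // assumed example name
  //       .setSerializedFilter(com.google.protobuf.ByteString.EMPTY)   // placeholder payload
  //       .build();
  //   FilterProtos.WhileMatchFilter wrapped = FilterProtos.WhileMatchFilter.newBuilder()
  //       .setFilter(inner)          // required field; build() throws if it is unset
  //       .build();
  //   byte[] wire = wrapped.toByteArray();
  //   FilterProtos.WhileMatchFilter parsed = FilterProtos.WhileMatchFilter.parseFrom(wire);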

  public interface FilterAllFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code FilterAllFilter}
   */
  public static final class FilterAllFilter extends
      com.google.protobuf.GeneratedMessage
      implements FilterAllFilterOrBuilder {
    // Use FilterAllFilter.newBuilder() to construct.
    private FilterAllFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private FilterAllFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15561 
15562     private static final FilterAllFilter defaultInstance;
getDefaultInstance()15563     public static FilterAllFilter getDefaultInstance() {
15564       return defaultInstance;
15565     }
15566 
getDefaultInstanceForType()15567     public FilterAllFilter getDefaultInstanceForType() {
15568       return defaultInstance;
15569     }
15570 
15571     private final com.google.protobuf.UnknownFieldSet unknownFields;
15572     @java.lang.Override
15573     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()15574         getUnknownFields() {
15575       return this.unknownFields;
15576     }
FilterAllFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15577     private FilterAllFilter(
15578         com.google.protobuf.CodedInputStream input,
15579         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15580         throws com.google.protobuf.InvalidProtocolBufferException {
15581       initFields();
15582       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15583           com.google.protobuf.UnknownFieldSet.newBuilder();
15584       try {
15585         boolean done = false;
15586         while (!done) {
15587           int tag = input.readTag();
15588           switch (tag) {
15589             case 0:
15590               done = true;
15591               break;
15592             default: {
15593               if (!parseUnknownField(input, unknownFields,
15594                                      extensionRegistry, tag)) {
15595                 done = true;
15596               }
15597               break;
15598             }
15599           }
15600         }
15601       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15602         throw e.setUnfinishedMessage(this);
15603       } catch (java.io.IOException e) {
15604         throw new com.google.protobuf.InvalidProtocolBufferException(
15605             e.getMessage()).setUnfinishedMessage(this);
15606       } finally {
15607         this.unknownFields = unknownFields.build();
15608         makeExtensionsImmutable();
15609       }
15610     }
15611     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()15612         getDescriptor() {
15613       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterAllFilter_descriptor;
15614     }
15615 
15616     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()15617         internalGetFieldAccessorTable() {
15618       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterAllFilter_fieldAccessorTable
15619           .ensureFieldAccessorsInitialized(
15620               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class);
15621     }
15622 
15623     public static com.google.protobuf.Parser<FilterAllFilter> PARSER =
15624         new com.google.protobuf.AbstractParser<FilterAllFilter>() {
15625       public FilterAllFilter parsePartialFrom(
15626           com.google.protobuf.CodedInputStream input,
15627           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15628           throws com.google.protobuf.InvalidProtocolBufferException {
15629         return new FilterAllFilter(input, extensionRegistry);
15630       }
15631     };
15632 
15633     @java.lang.Override
getParserForType()15634     public com.google.protobuf.Parser<FilterAllFilter> getParserForType() {
15635       return PARSER;
15636     }
15637 
initFields()15638     private void initFields() {
15639     }
15640     private byte memoizedIsInitialized = -1;
isInitialized()15641     public final boolean isInitialized() {
15642       byte isInitialized = memoizedIsInitialized;
15643       if (isInitialized != -1) return isInitialized == 1;
15644 
15645       memoizedIsInitialized = 1;
15646       return true;
15647     }
15648 
writeTo(com.google.protobuf.CodedOutputStream output)15649     public void writeTo(com.google.protobuf.CodedOutputStream output)
15650                         throws java.io.IOException {
15651       getSerializedSize();
15652       getUnknownFields().writeTo(output);
15653     }
15654 
15655     private int memoizedSerializedSize = -1;
getSerializedSize()15656     public int getSerializedSize() {
15657       int size = memoizedSerializedSize;
15658       if (size != -1) return size;
15659 
15660       size = 0;
15661       size += getUnknownFields().getSerializedSize();
15662       memoizedSerializedSize = size;
15663       return size;
15664     }
15665 
15666     private static final long serialVersionUID = 0L;
15667     @java.lang.Override
writeReplace()15668     protected java.lang.Object writeReplace()
15669         throws java.io.ObjectStreamException {
15670       return super.writeReplace();
15671     }
15672 
15673     @java.lang.Override
equals(final java.lang.Object obj)15674     public boolean equals(final java.lang.Object obj) {
15675       if (obj == this) {
15676        return true;
15677       }
15678       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter)) {
15679         return super.equals(obj);
15680       }
15681       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) obj;
15682 
15683       boolean result = true;
15684       result = result &&
15685           getUnknownFields().equals(other.getUnknownFields());
15686       return result;
15687     }
15688 
15689     private int memoizedHashCode = 0;
15690     @java.lang.Override
hashCode()15691     public int hashCode() {
15692       if (memoizedHashCode != 0) {
15693         return memoizedHashCode;
15694       }
15695       int hash = 41;
15696       hash = (19 * hash) + getDescriptorForType().hashCode();
15697       hash = (29 * hash) + getUnknownFields().hashCode();
15698       memoizedHashCode = hash;
15699       return hash;
15700     }
15701 
parseFrom( com.google.protobuf.ByteString data)15702     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
15703         com.google.protobuf.ByteString data)
15704         throws com.google.protobuf.InvalidProtocolBufferException {
15705       return PARSER.parseFrom(data);
15706     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15707     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
15708         com.google.protobuf.ByteString data,
15709         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15710         throws com.google.protobuf.InvalidProtocolBufferException {
15711       return PARSER.parseFrom(data, extensionRegistry);
15712     }
parseFrom(byte[] data)15713     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(byte[] data)
15714         throws com.google.protobuf.InvalidProtocolBufferException {
15715       return PARSER.parseFrom(data);
15716     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15717     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
15718         byte[] data,
15719         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15720         throws com.google.protobuf.InvalidProtocolBufferException {
15721       return PARSER.parseFrom(data, extensionRegistry);
15722     }
parseFrom(java.io.InputStream input)15723     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(java.io.InputStream input)
15724         throws java.io.IOException {
15725       return PARSER.parseFrom(input);
15726     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15727     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
15728         java.io.InputStream input,
15729         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15730         throws java.io.IOException {
15731       return PARSER.parseFrom(input, extensionRegistry);
15732     }
parseDelimitedFrom(java.io.InputStream input)15733     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(java.io.InputStream input)
15734         throws java.io.IOException {
15735       return PARSER.parseDelimitedFrom(input);
15736     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15737     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseDelimitedFrom(
15738         java.io.InputStream input,
15739         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15740         throws java.io.IOException {
15741       return PARSER.parseDelimitedFrom(input, extensionRegistry);
15742     }
parseFrom( com.google.protobuf.CodedInputStream input)15743     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
15744         com.google.protobuf.CodedInputStream input)
15745         throws java.io.IOException {
15746       return PARSER.parseFrom(input);
15747     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15748     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parseFrom(
15749         com.google.protobuf.CodedInputStream input,
15750         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15751         throws java.io.IOException {
15752       return PARSER.parseFrom(input, extensionRegistry);
15753     }
15754 
newBuilder()15755     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()15756     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter prototype)15757     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter prototype) {
15758       return newBuilder().mergeFrom(prototype);
15759     }
toBuilder()15760     public Builder toBuilder() { return newBuilder(this); }
15761 
15762     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)15763     protected Builder newBuilderForType(
15764         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15765       Builder builder = new Builder(parent);
15766       return builder;
15767     }
15768     /**
15769      * Protobuf type {@code FilterAllFilter}
15770      */
15771     public static final class Builder extends
15772         com.google.protobuf.GeneratedMessage.Builder<Builder>
15773        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilterOrBuilder {
15774       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()15775           getDescriptor() {
15776         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterAllFilter_descriptor;
15777       }
15778 
15779       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()15780           internalGetFieldAccessorTable() {
15781         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterAllFilter_fieldAccessorTable
15782             .ensureFieldAccessorsInitialized(
15783                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.Builder.class);
15784       }
15785 
15786       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.newBuilder()
Builder()15787       private Builder() {
15788         maybeForceBuilderInitialization();
15789       }
15790 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)15791       private Builder(
15792           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15793         super(parent);
15794         maybeForceBuilderInitialization();
15795       }
maybeForceBuilderInitialization()15796       private void maybeForceBuilderInitialization() {
15797         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15798         }
15799       }
create()15800       private static Builder create() {
15801         return new Builder();
15802       }
15803 
clear()15804       public Builder clear() {
15805         super.clear();
15806         return this;
15807       }
15808 
clone()15809       public Builder clone() {
15810         return create().mergeFrom(buildPartial());
15811       }
15812 
15813       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()15814           getDescriptorForType() {
15815         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterAllFilter_descriptor;
15816       }
15817 
getDefaultInstanceForType()15818       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter getDefaultInstanceForType() {
15819         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance();
15820       }
15821 
build()15822       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter build() {
15823         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter result = buildPartial();
15824         if (!result.isInitialized()) {
15825           throw newUninitializedMessageException(result);
15826         }
15827         return result;
15828       }
15829 
buildPartial()15830       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter buildPartial() {
15831         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter(this);
15832         onBuilt();
15833         return result;
15834       }
15835 
mergeFrom(com.google.protobuf.Message other)15836       public Builder mergeFrom(com.google.protobuf.Message other) {
15837         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) {
15838           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter)other);
15839         } else {
15840           super.mergeFrom(other);
15841           return this;
15842         }
15843       }
15844 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other)15845       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter other) {
15846         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter.getDefaultInstance()) return this;
15847         this.mergeUnknownFields(other.getUnknownFields());
15848         return this;
15849       }
15850 
isInitialized()15851       public final boolean isInitialized() {
15852         return true;
15853       }
15854 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15855       public Builder mergeFrom(
15856           com.google.protobuf.CodedInputStream input,
15857           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15858           throws java.io.IOException {
15859         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter parsedMessage = null;
15860         try {
15861           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15862         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15863           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterAllFilter) e.getUnfinishedMessage();
15864           throw e;
15865         } finally {
15866           if (parsedMessage != null) {
15867             mergeFrom(parsedMessage);
15868           }
15869         }
15870         return this;
15871       }
15872 
15873       // @@protoc_insertion_point(builder_scope:FilterAllFilter)
15874     }
15875 
    static {
      defaultInstance = new FilterAllFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:FilterAllFilter)
  }
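
  // Editor's note (not protoc output): FilterAllFilter declares no fields, so a
  // default instance serializes to an empty byte array; this sketch only shows
  // the round trip through the generated parse/serialize methods.
  //
  //   FilterProtos.FilterAllFilter empty = FilterProtos.FilterAllFilter.newBuilder().build();
  //   assert empty.toByteArray().length == 0;
  //   FilterProtos.FilterAllFilter back =
  //       FilterProtos.FilterAllFilter.parseFrom(empty.toByteArray());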
15883 
  public interface RowRangeOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes start_row = 1;
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    boolean hasStartRow();
    /**
     * <code>optional bytes start_row = 1;</code>
     */
    com.google.protobuf.ByteString getStartRow();

    // optional bool start_row_inclusive = 2;
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    boolean hasStartRowInclusive();
    /**
     * <code>optional bool start_row_inclusive = 2;</code>
     */
    boolean getStartRowInclusive();

    // optional bytes stop_row = 3;
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    boolean hasStopRow();
    /**
     * <code>optional bytes stop_row = 3;</code>
     */
    com.google.protobuf.ByteString getStopRow();

    // optional bool stop_row_inclusive = 4;
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    boolean hasStopRowInclusive();
    /**
     * <code>optional bool stop_row_inclusive = 4;</code>
     */
    boolean getStopRowInclusive();
  }
15927   /**
15928    * Protobuf type {@code RowRange}
15929    */
15930   public static final class RowRange extends
15931       com.google.protobuf.GeneratedMessage
15932       implements RowRangeOrBuilder {
15933     // Use RowRange.newBuilder() to construct.
RowRange(com.google.protobuf.GeneratedMessage.Builder<?> builder)15934     private RowRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
15935       super(builder);
15936       this.unknownFields = builder.getUnknownFields();
15937     }
RowRange(boolean noInit)15938     private RowRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
15939 
15940     private static final RowRange defaultInstance;
getDefaultInstance()15941     public static RowRange getDefaultInstance() {
15942       return defaultInstance;
15943     }
15944 
getDefaultInstanceForType()15945     public RowRange getDefaultInstanceForType() {
15946       return defaultInstance;
15947     }
15948 
15949     private final com.google.protobuf.UnknownFieldSet unknownFields;
15950     @java.lang.Override
15951     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()15952         getUnknownFields() {
15953       return this.unknownFields;
15954     }
RowRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15955     private RowRange(
15956         com.google.protobuf.CodedInputStream input,
15957         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15958         throws com.google.protobuf.InvalidProtocolBufferException {
15959       initFields();
15960       int mutable_bitField0_ = 0;
15961       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
15962           com.google.protobuf.UnknownFieldSet.newBuilder();
15963       try {
15964         boolean done = false;
15965         while (!done) {
15966           int tag = input.readTag();
15967           switch (tag) {
15968             case 0:
15969               done = true;
15970               break;
15971             default: {
15972               if (!parseUnknownField(input, unknownFields,
15973                                      extensionRegistry, tag)) {
15974                 done = true;
15975               }
15976               break;
15977             }
15978             case 10: {
15979               bitField0_ |= 0x00000001;
15980               startRow_ = input.readBytes();
15981               break;
15982             }
15983             case 16: {
15984               bitField0_ |= 0x00000002;
15985               startRowInclusive_ = input.readBool();
15986               break;
15987             }
15988             case 26: {
15989               bitField0_ |= 0x00000004;
15990               stopRow_ = input.readBytes();
15991               break;
15992             }
15993             case 32: {
15994               bitField0_ |= 0x00000008;
15995               stopRowInclusive_ = input.readBool();
15996               break;
15997             }
15998           }
15999         }
16000       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16001         throw e.setUnfinishedMessage(this);
16002       } catch (java.io.IOException e) {
16003         throw new com.google.protobuf.InvalidProtocolBufferException(
16004             e.getMessage()).setUnfinishedMessage(this);
16005       } finally {
16006         this.unknownFields = unknownFields.build();
16007         makeExtensionsImmutable();
16008       }
16009     }
16010     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()16011         getDescriptor() {
16012       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowRange_descriptor;
16013     }
16014 
16015     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()16016         internalGetFieldAccessorTable() {
16017       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowRange_fieldAccessorTable
16018           .ensureFieldAccessorsInitialized(
16019               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder.class);
16020     }
16021 
16022     public static com.google.protobuf.Parser<RowRange> PARSER =
16023         new com.google.protobuf.AbstractParser<RowRange>() {
16024       public RowRange parsePartialFrom(
16025           com.google.protobuf.CodedInputStream input,
16026           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16027           throws com.google.protobuf.InvalidProtocolBufferException {
16028         return new RowRange(input, extensionRegistry);
16029       }
16030     };
16031 
16032     @java.lang.Override
getParserForType()16033     public com.google.protobuf.Parser<RowRange> getParserForType() {
16034       return PARSER;
16035     }
16036 
16037     private int bitField0_;
16038     // optional bytes start_row = 1;
16039     public static final int START_ROW_FIELD_NUMBER = 1;
16040     private com.google.protobuf.ByteString startRow_;
16041     /**
16042      * <code>optional bytes start_row = 1;</code>
16043      */
hasStartRow()16044     public boolean hasStartRow() {
16045       return ((bitField0_ & 0x00000001) == 0x00000001);
16046     }
16047     /**
16048      * <code>optional bytes start_row = 1;</code>
16049      */
getStartRow()16050     public com.google.protobuf.ByteString getStartRow() {
16051       return startRow_;
16052     }
16053 
16054     // optional bool start_row_inclusive = 2;
16055     public static final int START_ROW_INCLUSIVE_FIELD_NUMBER = 2;
16056     private boolean startRowInclusive_;
16057     /**
16058      * <code>optional bool start_row_inclusive = 2;</code>
16059      */
hasStartRowInclusive()16060     public boolean hasStartRowInclusive() {
16061       return ((bitField0_ & 0x00000002) == 0x00000002);
16062     }
16063     /**
16064      * <code>optional bool start_row_inclusive = 2;</code>
16065      */
getStartRowInclusive()16066     public boolean getStartRowInclusive() {
16067       return startRowInclusive_;
16068     }
16069 
16070     // optional bytes stop_row = 3;
16071     public static final int STOP_ROW_FIELD_NUMBER = 3;
16072     private com.google.protobuf.ByteString stopRow_;
16073     /**
16074      * <code>optional bytes stop_row = 3;</code>
16075      */
hasStopRow()16076     public boolean hasStopRow() {
16077       return ((bitField0_ & 0x00000004) == 0x00000004);
16078     }
16079     /**
16080      * <code>optional bytes stop_row = 3;</code>
16081      */
getStopRow()16082     public com.google.protobuf.ByteString getStopRow() {
16083       return stopRow_;
16084     }
16085 
16086     // optional bool stop_row_inclusive = 4;
16087     public static final int STOP_ROW_INCLUSIVE_FIELD_NUMBER = 4;
16088     private boolean stopRowInclusive_;
16089     /**
16090      * <code>optional bool stop_row_inclusive = 4;</code>
16091      */
hasStopRowInclusive()16092     public boolean hasStopRowInclusive() {
16093       return ((bitField0_ & 0x00000008) == 0x00000008);
16094     }
16095     /**
16096      * <code>optional bool stop_row_inclusive = 4;</code>
16097      */
getStopRowInclusive()16098     public boolean getStopRowInclusive() {
16099       return stopRowInclusive_;
16100     }
16101 
initFields()16102     private void initFields() {
16103       startRow_ = com.google.protobuf.ByteString.EMPTY;
16104       startRowInclusive_ = false;
16105       stopRow_ = com.google.protobuf.ByteString.EMPTY;
16106       stopRowInclusive_ = false;
16107     }
16108     private byte memoizedIsInitialized = -1;
isInitialized()16109     public final boolean isInitialized() {
16110       byte isInitialized = memoizedIsInitialized;
16111       if (isInitialized != -1) return isInitialized == 1;
16112 
16113       memoizedIsInitialized = 1;
16114       return true;
16115     }
16116 
writeTo(com.google.protobuf.CodedOutputStream output)16117     public void writeTo(com.google.protobuf.CodedOutputStream output)
16118                         throws java.io.IOException {
16119       getSerializedSize();
16120       if (((bitField0_ & 0x00000001) == 0x00000001)) {
16121         output.writeBytes(1, startRow_);
16122       }
16123       if (((bitField0_ & 0x00000002) == 0x00000002)) {
16124         output.writeBool(2, startRowInclusive_);
16125       }
16126       if (((bitField0_ & 0x00000004) == 0x00000004)) {
16127         output.writeBytes(3, stopRow_);
16128       }
16129       if (((bitField0_ & 0x00000008) == 0x00000008)) {
16130         output.writeBool(4, stopRowInclusive_);
16131       }
16132       getUnknownFields().writeTo(output);
16133     }
16134 
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, startRowInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, stopRow_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, stopRowInclusive_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
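
    // Editor's note (not protoc output): a worked size example under assumed
    // field values. For start_row set to a 3-byte key and start_row_inclusive
    // set to true, with the other fields unset:
    //   field 1 (bytes): 1 tag byte + 1 length byte + 3 payload bytes = 5
    //   field 2 (bool):  1 tag byte + 1 value byte                    = 2
    //   getSerializedSize() == 7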
16161 
16162     private static final long serialVersionUID = 0L;
16163     @java.lang.Override
writeReplace()16164     protected java.lang.Object writeReplace()
16165         throws java.io.ObjectStreamException {
16166       return super.writeReplace();
16167     }
16168 
16169     @java.lang.Override
equals(final java.lang.Object obj)16170     public boolean equals(final java.lang.Object obj) {
16171       if (obj == this) {
16172        return true;
16173       }
16174       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange)) {
16175         return super.equals(obj);
16176       }
16177       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) obj;
16178 
16179       boolean result = true;
16180       result = result && (hasStartRow() == other.hasStartRow());
16181       if (hasStartRow()) {
16182         result = result && getStartRow()
16183             .equals(other.getStartRow());
16184       }
16185       result = result && (hasStartRowInclusive() == other.hasStartRowInclusive());
16186       if (hasStartRowInclusive()) {
16187         result = result && (getStartRowInclusive()
16188             == other.getStartRowInclusive());
16189       }
16190       result = result && (hasStopRow() == other.hasStopRow());
16191       if (hasStopRow()) {
16192         result = result && getStopRow()
16193             .equals(other.getStopRow());
16194       }
16195       result = result && (hasStopRowInclusive() == other.hasStopRowInclusive());
16196       if (hasStopRowInclusive()) {
16197         result = result && (getStopRowInclusive()
16198             == other.getStopRowInclusive());
16199       }
16200       result = result &&
16201           getUnknownFields().equals(other.getUnknownFields());
16202       return result;
16203     }
16204 
16205     private int memoizedHashCode = 0;
16206     @java.lang.Override
hashCode()16207     public int hashCode() {
16208       if (memoizedHashCode != 0) {
16209         return memoizedHashCode;
16210       }
16211       int hash = 41;
16212       hash = (19 * hash) + getDescriptorForType().hashCode();
16213       if (hasStartRow()) {
16214         hash = (37 * hash) + START_ROW_FIELD_NUMBER;
16215         hash = (53 * hash) + getStartRow().hashCode();
16216       }
16217       if (hasStartRowInclusive()) {
16218         hash = (37 * hash) + START_ROW_INCLUSIVE_FIELD_NUMBER;
16219         hash = (53 * hash) + hashBoolean(getStartRowInclusive());
16220       }
16221       if (hasStopRow()) {
16222         hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
16223         hash = (53 * hash) + getStopRow().hashCode();
16224       }
16225       if (hasStopRowInclusive()) {
16226         hash = (37 * hash) + STOP_ROW_INCLUSIVE_FIELD_NUMBER;
16227         hash = (53 * hash) + hashBoolean(getStopRowInclusive());
16228       }
16229       hash = (29 * hash) + getUnknownFields().hashCode();
16230       memoizedHashCode = hash;
16231       return hash;
16232     }
16233 
parseFrom( com.google.protobuf.ByteString data)16234     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
16235         com.google.protobuf.ByteString data)
16236         throws com.google.protobuf.InvalidProtocolBufferException {
16237       return PARSER.parseFrom(data);
16238     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16239     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
16240         com.google.protobuf.ByteString data,
16241         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16242         throws com.google.protobuf.InvalidProtocolBufferException {
16243       return PARSER.parseFrom(data, extensionRegistry);
16244     }
parseFrom(byte[] data)16245     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(byte[] data)
16246         throws com.google.protobuf.InvalidProtocolBufferException {
16247       return PARSER.parseFrom(data);
16248     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16249     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
16250         byte[] data,
16251         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16252         throws com.google.protobuf.InvalidProtocolBufferException {
16253       return PARSER.parseFrom(data, extensionRegistry);
16254     }
parseFrom(java.io.InputStream input)16255     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(java.io.InputStream input)
16256         throws java.io.IOException {
16257       return PARSER.parseFrom(input);
16258     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16259     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
16260         java.io.InputStream input,
16261         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16262         throws java.io.IOException {
16263       return PARSER.parseFrom(input, extensionRegistry);
16264     }
parseDelimitedFrom(java.io.InputStream input)16265     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(java.io.InputStream input)
16266         throws java.io.IOException {
16267       return PARSER.parseDelimitedFrom(input);
16268     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16269     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseDelimitedFrom(
16270         java.io.InputStream input,
16271         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16272         throws java.io.IOException {
16273       return PARSER.parseDelimitedFrom(input, extensionRegistry);
16274     }
parseFrom( com.google.protobuf.CodedInputStream input)16275     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
16276         com.google.protobuf.CodedInputStream input)
16277         throws java.io.IOException {
16278       return PARSER.parseFrom(input);
16279     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16280     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parseFrom(
16281         com.google.protobuf.CodedInputStream input,
16282         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16283         throws java.io.IOException {
16284       return PARSER.parseFrom(input, extensionRegistry);
16285     }
16286 
newBuilder()16287     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()16288     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange prototype)16289     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange prototype) {
16290       return newBuilder().mergeFrom(prototype);
16291     }
toBuilder()16292     public Builder toBuilder() { return newBuilder(this); }
16293 
16294     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)16295     protected Builder newBuilderForType(
16296         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16297       Builder builder = new Builder(parent);
16298       return builder;
16299     }
16300     /**
16301      * Protobuf type {@code RowRange}
16302      */
16303     public static final class Builder extends
16304         com.google.protobuf.GeneratedMessage.Builder<Builder>
16305        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder {
16306       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()16307           getDescriptor() {
16308         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowRange_descriptor;
16309       }
16310 
16311       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()16312           internalGetFieldAccessorTable() {
16313         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowRange_fieldAccessorTable
16314             .ensureFieldAccessorsInitialized(
16315                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder.class);
16316       }
16317 
16318       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.newBuilder()
Builder()16319       private Builder() {
16320         maybeForceBuilderInitialization();
16321       }
16322 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)16323       private Builder(
16324           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16325         super(parent);
16326         maybeForceBuilderInitialization();
16327       }
maybeForceBuilderInitialization()16328       private void maybeForceBuilderInitialization() {
16329         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
16330         }
16331       }
create()16332       private static Builder create() {
16333         return new Builder();
16334       }
16335 
clear()16336       public Builder clear() {
16337         super.clear();
16338         startRow_ = com.google.protobuf.ByteString.EMPTY;
16339         bitField0_ = (bitField0_ & ~0x00000001);
16340         startRowInclusive_ = false;
16341         bitField0_ = (bitField0_ & ~0x00000002);
16342         stopRow_ = com.google.protobuf.ByteString.EMPTY;
16343         bitField0_ = (bitField0_ & ~0x00000004);
16344         stopRowInclusive_ = false;
16345         bitField0_ = (bitField0_ & ~0x00000008);
16346         return this;
16347       }
16348 
clone()16349       public Builder clone() {
16350         return create().mergeFrom(buildPartial());
16351       }
16352 
16353       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()16354           getDescriptorForType() {
16355         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_RowRange_descriptor;
16356       }
16357 
getDefaultInstanceForType()16358       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getDefaultInstanceForType() {
16359         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance();
16360       }
16361 
build()16362       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange build() {
16363         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange result = buildPartial();
16364         if (!result.isInitialized()) {
16365           throw newUninitializedMessageException(result);
16366         }
16367         return result;
16368       }
16369 
buildPartial()16370       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange buildPartial() {
16371         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange(this);
16372         int from_bitField0_ = bitField0_;
16373         int to_bitField0_ = 0;
16374         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
16375           to_bitField0_ |= 0x00000001;
16376         }
16377         result.startRow_ = startRow_;
16378         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
16379           to_bitField0_ |= 0x00000002;
16380         }
16381         result.startRowInclusive_ = startRowInclusive_;
16382         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
16383           to_bitField0_ |= 0x00000004;
16384         }
16385         result.stopRow_ = stopRow_;
16386         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
16387           to_bitField0_ |= 0x00000008;
16388         }
16389         result.stopRowInclusive_ = stopRowInclusive_;
16390         result.bitField0_ = to_bitField0_;
16391         onBuilt();
16392         return result;
16393       }
16394 
mergeFrom(com.google.protobuf.Message other)16395       public Builder mergeFrom(com.google.protobuf.Message other) {
16396         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) {
16397           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange)other);
16398         } else {
16399           super.mergeFrom(other);
16400           return this;
16401         }
16402       }
16403 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other)16404       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange other) {
16405         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance()) return this;
16406         if (other.hasStartRow()) {
16407           setStartRow(other.getStartRow());
16408         }
16409         if (other.hasStartRowInclusive()) {
16410           setStartRowInclusive(other.getStartRowInclusive());
16411         }
16412         if (other.hasStopRow()) {
16413           setStopRow(other.getStopRow());
16414         }
16415         if (other.hasStopRowInclusive()) {
16416           setStopRowInclusive(other.getStopRowInclusive());
16417         }
16418         this.mergeUnknownFields(other.getUnknownFields());
16419         return this;
16420       }
16421 
isInitialized()16422       public final boolean isInitialized() {
16423         return true;
16424       }
16425 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)16426       public Builder mergeFrom(
16427           com.google.protobuf.CodedInputStream input,
16428           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16429           throws java.io.IOException {
16430         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange parsedMessage = null;
16431         try {
16432           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
16433         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16434           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange) e.getUnfinishedMessage();
16435           throw e;
16436         } finally {
16437           if (parsedMessage != null) {
16438             mergeFrom(parsedMessage);
16439           }
16440         }
16441         return this;
16442       }
16443       private int bitField0_;
16444 
      // optional bytes start_row = 1;
      private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public boolean hasStartRow() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public com.google.protobuf.ByteString getStartRow() {
        return startRow_;
      }
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public Builder setStartRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        startRow_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes start_row = 1;</code>
       */
      public Builder clearStartRow() {
        bitField0_ = (bitField0_ & ~0x00000001);
        startRow_ = getDefaultInstance().getStartRow();
        onChanged();
        return this;
      }
16480 
16481       // optional bool start_row_inclusive = 2;
16482       private boolean startRowInclusive_ ;
16483       /**
16484        * <code>optional bool start_row_inclusive = 2;</code>
16485        */
hasStartRowInclusive()16486       public boolean hasStartRowInclusive() {
16487         return ((bitField0_ & 0x00000002) == 0x00000002);
16488       }
16489       /**
16490        * <code>optional bool start_row_inclusive = 2;</code>
16491        */
getStartRowInclusive()16492       public boolean getStartRowInclusive() {
16493         return startRowInclusive_;
16494       }
16495       /**
16496        * <code>optional bool start_row_inclusive = 2;</code>
16497        */
setStartRowInclusive(boolean value)16498       public Builder setStartRowInclusive(boolean value) {
16499         bitField0_ |= 0x00000002;
16500         startRowInclusive_ = value;
16501         onChanged();
16502         return this;
16503       }
16504       /**
16505        * <code>optional bool start_row_inclusive = 2;</code>
16506        */
clearStartRowInclusive()16507       public Builder clearStartRowInclusive() {
16508         bitField0_ = (bitField0_ & ~0x00000002);
16509         startRowInclusive_ = false;
16510         onChanged();
16511         return this;
16512       }
16513 
16514       // optional bytes stop_row = 3;
16515       private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
16516       /**
16517        * <code>optional bytes stop_row = 3;</code>
16518        */
hasStopRow()16519       public boolean hasStopRow() {
16520         return ((bitField0_ & 0x00000004) == 0x00000004);
16521       }
16522       /**
16523        * <code>optional bytes stop_row = 3;</code>
16524        */
getStopRow()16525       public com.google.protobuf.ByteString getStopRow() {
16526         return stopRow_;
16527       }
16528       /**
16529        * <code>optional bytes stop_row = 3;</code>
16530        */
      public Builder setStopRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        stopRow_ = value;
        onChanged();
        return this;
      }
16540       /**
16541        * <code>optional bytes stop_row = 3;</code>
16542        */
clearStopRow()16543       public Builder clearStopRow() {
16544         bitField0_ = (bitField0_ & ~0x00000004);
16545         stopRow_ = getDefaultInstance().getStopRow();
16546         onChanged();
16547         return this;
16548       }
16549 
16550       // optional bool stop_row_inclusive = 4;
16551       private boolean stopRowInclusive_ ;
16552       /**
16553        * <code>optional bool stop_row_inclusive = 4;</code>
16554        */
hasStopRowInclusive()16555       public boolean hasStopRowInclusive() {
16556         return ((bitField0_ & 0x00000008) == 0x00000008);
16557       }
16558       /**
16559        * <code>optional bool stop_row_inclusive = 4;</code>
16560        */
getStopRowInclusive()16561       public boolean getStopRowInclusive() {
16562         return stopRowInclusive_;
16563       }
16564       /**
16565        * <code>optional bool stop_row_inclusive = 4;</code>
16566        */
16567       public Builder setStopRowInclusive(boolean value) {
16568         bitField0_ |= 0x00000008;
16569         stopRowInclusive_ = value;
16570         onChanged();
16571         return this;
16572       }
16573       /**
16574        * <code>optional bool stop_row_inclusive = 4;</code>
16575        */
16576       public Builder clearStopRowInclusive() {
16577         bitField0_ = (bitField0_ & ~0x00000008);
16578         stopRowInclusive_ = false;
16579         onChanged();
16580         return this;
16581       }
16582 
16583       // @@protoc_insertion_point(builder_scope:RowRange)
16584     }
16585 
16586     static {
16587       defaultInstance = new RowRange(true);
16588       defaultInstance.initFields();
16589     }
16590 
16591     // @@protoc_insertion_point(class_scope:RowRange)
16592   }
16593 
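  // --- Illustrative usage sketch (not emitted by protoc) ---------------------
  // A minimal example of driving the generated RowRange builder above; the row
  // keys "row-a" and "row-m" are placeholder values chosen for illustration only.
  private static RowRange exampleRowRange() {
    return RowRange.newBuilder()
        .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-a"))
        .setStartRowInclusive(true)
        .setStopRow(com.google.protobuf.ByteString.copyFromUtf8("row-m"))
        .setStopRowInclusive(false)
        .build();
  }
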
16594   public interface MultiRowRangeFilterOrBuilder
16595       extends com.google.protobuf.MessageOrBuilder {
16596 
16597     // repeated .RowRange row_range_list = 1;
16598     /**
16599      * <code>repeated .RowRange row_range_list = 1;</code>
16600      */
16601     java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>
16602         getRowRangeListList();
16603     /**
16604      * <code>repeated .RowRange row_range_list = 1;</code>
16605      */
16606     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index);
16607     /**
16608      * <code>repeated .RowRange row_range_list = 1;</code>
16609      */
16610     int getRowRangeListCount();
16611     /**
16612      * <code>repeated .RowRange row_range_list = 1;</code>
16613      */
16614     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
16615         getRowRangeListOrBuilderList();
16616     /**
16617      * <code>repeated .RowRange row_range_list = 1;</code>
16618      */
16619     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
16620         int index);
16621   }
16622   /**
16623    * Protobuf type {@code MultiRowRangeFilter}
16624    */
16625   public static final class MultiRowRangeFilter extends
16626       com.google.protobuf.GeneratedMessage
16627       implements MultiRowRangeFilterOrBuilder {
16628     // Use MultiRowRangeFilter.newBuilder() to construct.
16629     private MultiRowRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
16630       super(builder);
16631       this.unknownFields = builder.getUnknownFields();
16632     }
16633     private MultiRowRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
16634 
16635     private static final MultiRowRangeFilter defaultInstance;
16636     public static MultiRowRangeFilter getDefaultInstance() {
16637       return defaultInstance;
16638     }
16639 
16640     public MultiRowRangeFilter getDefaultInstanceForType() {
16641       return defaultInstance;
16642     }
16643 
16644     private final com.google.protobuf.UnknownFieldSet unknownFields;
16645     @java.lang.Override
16646     public final com.google.protobuf.UnknownFieldSet
16647         getUnknownFields() {
16648       return this.unknownFields;
16649     }
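    // Wire-format constructor: reads the stream tag by tag, accumulating each
    // length-delimited RowRange (field 1, tag 10) into rowRangeList_ and
    // preserving anything unrecognized in unknownFields.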
16650     private MultiRowRangeFilter(
16651         com.google.protobuf.CodedInputStream input,
16652         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16653         throws com.google.protobuf.InvalidProtocolBufferException {
16654       initFields();
16655       int mutable_bitField0_ = 0;
16656       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
16657           com.google.protobuf.UnknownFieldSet.newBuilder();
16658       try {
16659         boolean done = false;
16660         while (!done) {
16661           int tag = input.readTag();
16662           switch (tag) {
16663             case 0:
16664               done = true;
16665               break;
16666             default: {
16667               if (!parseUnknownField(input, unknownFields,
16668                                      extensionRegistry, tag)) {
16669                 done = true;
16670               }
16671               break;
16672             }
16673             case 10: {
16674               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
16675                 rowRangeList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>();
16676                 mutable_bitField0_ |= 0x00000001;
16677               }
16678               rowRangeList_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.PARSER, extensionRegistry));
16679               break;
16680             }
16681           }
16682         }
16683       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
16684         throw e.setUnfinishedMessage(this);
16685       } catch (java.io.IOException e) {
16686         throw new com.google.protobuf.InvalidProtocolBufferException(
16687             e.getMessage()).setUnfinishedMessage(this);
16688       } finally {
16689         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
16690           rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_);
16691         }
16692         this.unknownFields = unknownFields.build();
16693         makeExtensionsImmutable();
16694       }
16695     }
16696     public static final com.google.protobuf.Descriptors.Descriptor
16697         getDescriptor() {
16698       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultiRowRangeFilter_descriptor;
16699     }
16700 
16701     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16702         internalGetFieldAccessorTable() {
16703       return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultiRowRangeFilter_fieldAccessorTable
16704           .ensureFieldAccessorsInitialized(
16705               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class);
16706     }
16707 
16708     public static com.google.protobuf.Parser<MultiRowRangeFilter> PARSER =
16709         new com.google.protobuf.AbstractParser<MultiRowRangeFilter>() {
16710       public MultiRowRangeFilter parsePartialFrom(
16711           com.google.protobuf.CodedInputStream input,
16712           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16713           throws com.google.protobuf.InvalidProtocolBufferException {
16714         return new MultiRowRangeFilter(input, extensionRegistry);
16715       }
16716     };
16717 
16718     @java.lang.Override
16719     public com.google.protobuf.Parser<MultiRowRangeFilter> getParserForType() {
16720       return PARSER;
16721     }
16722 
16723     // repeated .RowRange row_range_list = 1;
16724     public static final int ROW_RANGE_LIST_FIELD_NUMBER = 1;
16725     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> rowRangeList_;
16726     /**
16727      * <code>repeated .RowRange row_range_list = 1;</code>
16728      */
16729     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> getRowRangeListList() {
16730       return rowRangeList_;
16731     }
16732     /**
16733      * <code>repeated .RowRange row_range_list = 1;</code>
16734      */
16735     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
16736         getRowRangeListOrBuilderList() {
16737       return rowRangeList_;
16738     }
16739     /**
16740      * <code>repeated .RowRange row_range_list = 1;</code>
16741      */
16742     public int getRowRangeListCount() {
16743       return rowRangeList_.size();
16744     }
16745     /**
16746      * <code>repeated .RowRange row_range_list = 1;</code>
16747      */
16748     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index) {
16749       return rowRangeList_.get(index);
16750     }
16751     /**
16752      * <code>repeated .RowRange row_range_list = 1;</code>
16753      */
16754     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
16755         int index) {
16756       return rowRangeList_.get(index);
16757     }
16758 
16759     private void initFields() {
16760       rowRangeList_ = java.util.Collections.emptyList();
16761     }
16762     private byte memoizedIsInitialized = -1;
16763     public final boolean isInitialized() {
16764       byte isInitialized = memoizedIsInitialized;
16765       if (isInitialized != -1) return isInitialized == 1;
16766 
16767       memoizedIsInitialized = 1;
16768       return true;
16769     }
16770 
16771     public void writeTo(com.google.protobuf.CodedOutputStream output)
16772                         throws java.io.IOException {
16773       getSerializedSize();
16774       for (int i = 0; i < rowRangeList_.size(); i++) {
16775         output.writeMessage(1, rowRangeList_.get(i));
16776       }
16777       getUnknownFields().writeTo(output);
16778     }
16779 
16780     private int memoizedSerializedSize = -1;
16781     public int getSerializedSize() {
16782       int size = memoizedSerializedSize;
16783       if (size != -1) return size;
16784 
16785       size = 0;
16786       for (int i = 0; i < rowRangeList_.size(); i++) {
16787         size += com.google.protobuf.CodedOutputStream
16788           .computeMessageSize(1, rowRangeList_.get(i));
16789       }
16790       size += getUnknownFields().getSerializedSize();
16791       memoizedSerializedSize = size;
16792       return size;
16793     }
16794 
16795     private static final long serialVersionUID = 0L;
16796     @java.lang.Override
16797     protected java.lang.Object writeReplace()
16798         throws java.io.ObjectStreamException {
16799       return super.writeReplace();
16800     }
16801 
16802     @java.lang.Override
16803     public boolean equals(final java.lang.Object obj) {
16804       if (obj == this) {
16805         return true;
16806       }
16807       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter)) {
16808         return super.equals(obj);
16809       }
16810       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) obj;
16811 
16812       boolean result = true;
16813       result = result && getRowRangeListList()
16814           .equals(other.getRowRangeListList());
16815       result = result &&
16816           getUnknownFields().equals(other.getUnknownFields());
16817       return result;
16818     }
16819 
16820     private int memoizedHashCode = 0;
16821     @java.lang.Override
16822     public int hashCode() {
16823       if (memoizedHashCode != 0) {
16824         return memoizedHashCode;
16825       }
16826       int hash = 41;
16827       hash = (19 * hash) + getDescriptorForType().hashCode();
16828       if (getRowRangeListCount() > 0) {
16829         hash = (37 * hash) + ROW_RANGE_LIST_FIELD_NUMBER;
16830         hash = (53 * hash) + getRowRangeListList().hashCode();
16831       }
16832       hash = (29 * hash) + getUnknownFields().hashCode();
16833       memoizedHashCode = hash;
16834       return hash;
16835     }
16836 
16837     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
16838         com.google.protobuf.ByteString data)
16839         throws com.google.protobuf.InvalidProtocolBufferException {
16840       return PARSER.parseFrom(data);
16841     }
16842     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
16843         com.google.protobuf.ByteString data,
16844         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16845         throws com.google.protobuf.InvalidProtocolBufferException {
16846       return PARSER.parseFrom(data, extensionRegistry);
16847     }
16848     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(byte[] data)
16849         throws com.google.protobuf.InvalidProtocolBufferException {
16850       return PARSER.parseFrom(data);
16851     }
16852     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
16853         byte[] data,
16854         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16855         throws com.google.protobuf.InvalidProtocolBufferException {
16856       return PARSER.parseFrom(data, extensionRegistry);
16857     }
16858     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(java.io.InputStream input)
16859         throws java.io.IOException {
16860       return PARSER.parseFrom(input);
16861     }
16862     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
16863         java.io.InputStream input,
16864         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16865         throws java.io.IOException {
16866       return PARSER.parseFrom(input, extensionRegistry);
16867     }
16868     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(java.io.InputStream input)
16869         throws java.io.IOException {
16870       return PARSER.parseDelimitedFrom(input);
16871     }
16872     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseDelimitedFrom(
16873         java.io.InputStream input,
16874         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16875         throws java.io.IOException {
16876       return PARSER.parseDelimitedFrom(input, extensionRegistry);
16877     }
16878     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
16879         com.google.protobuf.CodedInputStream input)
16880         throws java.io.IOException {
16881       return PARSER.parseFrom(input);
16882     }
16883     public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parseFrom(
16884         com.google.protobuf.CodedInputStream input,
16885         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
16886         throws java.io.IOException {
16887       return PARSER.parseFrom(input, extensionRegistry);
16888     }
16889 
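    // Illustrative only (not generated): the parseFrom overloads above are the
    // usual way to rehydrate a MultiRowRangeFilter; a minimal sketch of a
    // byte-for-byte round trip through toByteString() and parseFrom().
    private static MultiRowRangeFilter exampleRoundTrip(MultiRowRangeFilter original)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return parseFrom(original.toByteString());
    }
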
16890     public static Builder newBuilder() { return Builder.create(); }
16891     public Builder newBuilderForType() { return newBuilder(); }
16892     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter prototype) {
16893       return newBuilder().mergeFrom(prototype);
16894     }
16895     public Builder toBuilder() { return newBuilder(this); }
16896 
16897     @java.lang.Override
16898     protected Builder newBuilderForType(
16899         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16900       Builder builder = new Builder(parent);
16901       return builder;
16902     }
16903     /**
16904      * Protobuf type {@code MultiRowRangeFilter}
16905      */
16906     public static final class Builder extends
16907         com.google.protobuf.GeneratedMessage.Builder<Builder>
16908        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilterOrBuilder {
16909       public static final com.google.protobuf.Descriptors.Descriptor
16910           getDescriptor() {
16911         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultiRowRangeFilter_descriptor;
16912       }
16913 
16914       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
16915           internalGetFieldAccessorTable() {
16916         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultiRowRangeFilter_fieldAccessorTable
16917             .ensureFieldAccessorsInitialized(
16918                 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.Builder.class);
16919       }
16920 
16921       // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.newBuilder()
16922       private Builder() {
16923         maybeForceBuilderInitialization();
16924       }
16925 
16926       private Builder(
16927           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
16928         super(parent);
16929         maybeForceBuilderInitialization();
16930       }
16931       private void maybeForceBuilderInitialization() {
16932         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
16933           getRowRangeListFieldBuilder();
16934         }
16935       }
16936       private static Builder create() {
16937         return new Builder();
16938       }
16939 
16940       public Builder clear() {
16941         super.clear();
16942         if (rowRangeListBuilder_ == null) {
16943           rowRangeList_ = java.util.Collections.emptyList();
16944           bitField0_ = (bitField0_ & ~0x00000001);
16945         } else {
16946           rowRangeListBuilder_.clear();
16947         }
16948         return this;
16949       }
16950 
16951       public Builder clone() {
16952         return create().mergeFrom(buildPartial());
16953       }
16954 
16955       public com.google.protobuf.Descriptors.Descriptor
16956           getDescriptorForType() {
16957         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultiRowRangeFilter_descriptor;
16958       }
16959 
16960       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter getDefaultInstanceForType() {
16961         return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.getDefaultInstance();
16962       }
16963 
16964       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter build() {
16965         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter result = buildPartial();
16966         if (!result.isInitialized()) {
16967           throw newUninitializedMessageException(result);
16968         }
16969         return result;
16970       }
16971 
16972       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter buildPartial() {
16973         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter(this);
16974         int from_bitField0_ = bitField0_;
16975         if (rowRangeListBuilder_ == null) {
16976           if (((bitField0_ & 0x00000001) == 0x00000001)) {
16977             rowRangeList_ = java.util.Collections.unmodifiableList(rowRangeList_);
16978             bitField0_ = (bitField0_ & ~0x00000001);
16979           }
16980           result.rowRangeList_ = rowRangeList_;
16981         } else {
16982           result.rowRangeList_ = rowRangeListBuilder_.build();
16983         }
16984         onBuilt();
16985         return result;
16986       }
16987 
16988       public Builder mergeFrom(com.google.protobuf.Message other) {
16989         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) {
16990           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter)other);
16991         } else {
16992           super.mergeFrom(other);
16993           return this;
16994         }
16995       }
16996 
16997       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter other) {
16998         if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter.getDefaultInstance()) return this;
16999         if (rowRangeListBuilder_ == null) {
17000           if (!other.rowRangeList_.isEmpty()) {
17001             if (rowRangeList_.isEmpty()) {
17002               rowRangeList_ = other.rowRangeList_;
17003               bitField0_ = (bitField0_ & ~0x00000001);
17004             } else {
17005               ensureRowRangeListIsMutable();
17006               rowRangeList_.addAll(other.rowRangeList_);
17007             }
17008             onChanged();
17009           }
17010         } else {
17011           if (!other.rowRangeList_.isEmpty()) {
17012             if (rowRangeListBuilder_.isEmpty()) {
17013               rowRangeListBuilder_.dispose();
17014               rowRangeListBuilder_ = null;
17015               rowRangeList_ = other.rowRangeList_;
17016               bitField0_ = (bitField0_ & ~0x00000001);
17017               rowRangeListBuilder_ =
17018                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
17019                    getRowRangeListFieldBuilder() : null;
17020             } else {
17021               rowRangeListBuilder_.addAllMessages(other.rowRangeList_);
17022             }
17023           }
17024         }
17025         this.mergeUnknownFields(other.getUnknownFields());
17026         return this;
17027       }
17028 
17029       public final boolean isInitialized() {
17030         return true;
17031       }
17032 
17033       public Builder mergeFrom(
17034           com.google.protobuf.CodedInputStream input,
17035           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17036           throws java.io.IOException {
17037         org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter parsedMessage = null;
17038         try {
17039           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
17040         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17041           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultiRowRangeFilter) e.getUnfinishedMessage();
17042           throw e;
17043         } finally {
17044           if (parsedMessage != null) {
17045             mergeFrom(parsedMessage);
17046           }
17047         }
17048         return this;
17049       }
17050       private int bitField0_;
17051 
17052       // repeated .RowRange row_range_list = 1;
17053       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> rowRangeList_ =
17054         java.util.Collections.emptyList();
17055       private void ensureRowRangeListIsMutable() {
17056         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
17057           rowRangeList_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange>(rowRangeList_);
17058           bitField0_ |= 0x00000001;
17059         }
17060       }
17061 
17062       private com.google.protobuf.RepeatedFieldBuilder<
17063           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder> rowRangeListBuilder_;
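      // The repeated field lives in exactly one of two forms: the plain
      // rowRangeList_ ArrayList above, or, once nested builders are requested,
      // the RepeatedFieldBuilder below. The accessors check rowRangeListBuilder_
      // for null to decide which representation is currently live.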
17064 
17065       /**
17066        * <code>repeated .RowRange row_range_list = 1;</code>
17067        */
17068       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> getRowRangeListList() {
17069         if (rowRangeListBuilder_ == null) {
17070           return java.util.Collections.unmodifiableList(rowRangeList_);
17071         } else {
17072           return rowRangeListBuilder_.getMessageList();
17073         }
17074       }
17075       /**
17076        * <code>repeated .RowRange row_range_list = 1;</code>
17077        */
17078       public int getRowRangeListCount() {
17079         if (rowRangeListBuilder_ == null) {
17080           return rowRangeList_.size();
17081         } else {
17082           return rowRangeListBuilder_.getCount();
17083         }
17084       }
17085       /**
17086        * <code>repeated .RowRange row_range_list = 1;</code>
17087        */
17088       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange getRowRangeList(int index) {
17089         if (rowRangeListBuilder_ == null) {
17090           return rowRangeList_.get(index);
17091         } else {
17092           return rowRangeListBuilder_.getMessage(index);
17093         }
17094       }
17095       /**
17096        * <code>repeated .RowRange row_range_list = 1;</code>
17097        */
17098       public Builder setRowRangeList(
17099           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
17100         if (rowRangeListBuilder_ == null) {
17101           if (value == null) {
17102             throw new NullPointerException();
17103           }
17104           ensureRowRangeListIsMutable();
17105           rowRangeList_.set(index, value);
17106           onChanged();
17107         } else {
17108           rowRangeListBuilder_.setMessage(index, value);
17109         }
17110         return this;
17111       }
17112       /**
17113        * <code>repeated .RowRange row_range_list = 1;</code>
17114        */
17115       public Builder setRowRangeList(
17116           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
17117         if (rowRangeListBuilder_ == null) {
17118           ensureRowRangeListIsMutable();
17119           rowRangeList_.set(index, builderForValue.build());
17120           onChanged();
17121         } else {
17122           rowRangeListBuilder_.setMessage(index, builderForValue.build());
17123         }
17124         return this;
17125       }
17126       /**
17127        * <code>repeated .RowRange row_range_list = 1;</code>
17128        */
17129       public Builder addRowRangeList(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
17130         if (rowRangeListBuilder_ == null) {
17131           if (value == null) {
17132             throw new NullPointerException();
17133           }
17134           ensureRowRangeListIsMutable();
17135           rowRangeList_.add(value);
17136           onChanged();
17137         } else {
17138           rowRangeListBuilder_.addMessage(value);
17139         }
17140         return this;
17141       }
17142       /**
17143        * <code>repeated .RowRange row_range_list = 1;</code>
17144        */
17145       public Builder addRowRangeList(
17146           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange value) {
17147         if (rowRangeListBuilder_ == null) {
17148           if (value == null) {
17149             throw new NullPointerException();
17150           }
17151           ensureRowRangeListIsMutable();
17152           rowRangeList_.add(index, value);
17153           onChanged();
17154         } else {
17155           rowRangeListBuilder_.addMessage(index, value);
17156         }
17157         return this;
17158       }
17159       /**
17160        * <code>repeated .RowRange row_range_list = 1;</code>
17161        */
17162       public Builder addRowRangeList(
17163           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
17164         if (rowRangeListBuilder_ == null) {
17165           ensureRowRangeListIsMutable();
17166           rowRangeList_.add(builderForValue.build());
17167           onChanged();
17168         } else {
17169           rowRangeListBuilder_.addMessage(builderForValue.build());
17170         }
17171         return this;
17172       }
17173       /**
17174        * <code>repeated .RowRange row_range_list = 1;</code>
17175        */
17176       public Builder addRowRangeList(
17177           int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder builderForValue) {
17178         if (rowRangeListBuilder_ == null) {
17179           ensureRowRangeListIsMutable();
17180           rowRangeList_.add(index, builderForValue.build());
17181           onChanged();
17182         } else {
17183           rowRangeListBuilder_.addMessage(index, builderForValue.build());
17184         }
17185         return this;
17186       }
17187       /**
17188        * <code>repeated .RowRange row_range_list = 1;</code>
17189        */
17190       public Builder addAllRowRangeList(
17191           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange> values) {
17192         if (rowRangeListBuilder_ == null) {
17193           ensureRowRangeListIsMutable();
17194           super.addAll(values, rowRangeList_);
17195           onChanged();
17196         } else {
17197           rowRangeListBuilder_.addAllMessages(values);
17198         }
17199         return this;
17200       }
17201       /**
17202        * <code>repeated .RowRange row_range_list = 1;</code>
17203        */
17204       public Builder clearRowRangeList() {
17205         if (rowRangeListBuilder_ == null) {
17206           rowRangeList_ = java.util.Collections.emptyList();
17207           bitField0_ = (bitField0_ & ~0x00000001);
17208           onChanged();
17209         } else {
17210           rowRangeListBuilder_.clear();
17211         }
17212         return this;
17213       }
17214       /**
17215        * <code>repeated .RowRange row_range_list = 1;</code>
17216        */
17217       public Builder removeRowRangeList(int index) {
17218         if (rowRangeListBuilder_ == null) {
17219           ensureRowRangeListIsMutable();
17220           rowRangeList_.remove(index);
17221           onChanged();
17222         } else {
17223           rowRangeListBuilder_.remove(index);
17224         }
17225         return this;
17226       }
17227       /**
17228        * <code>repeated .RowRange row_range_list = 1;</code>
17229        */
17230       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder getRowRangeListBuilder(
17231           int index) {
17232         return getRowRangeListFieldBuilder().getBuilder(index);
17233       }
17234       /**
17235        * <code>repeated .RowRange row_range_list = 1;</code>
17236        */
17237       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder getRowRangeListOrBuilder(
17238           int index) {
17239         if (rowRangeListBuilder_ == null) {
17240           return rowRangeList_.get(index);  } else {
17241           return rowRangeListBuilder_.getMessageOrBuilder(index);
17242         }
17243       }
17244       /**
17245        * <code>repeated .RowRange row_range_list = 1;</code>
17246        */
17247       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
17248            getRowRangeListOrBuilderList() {
17249         if (rowRangeListBuilder_ != null) {
17250           return rowRangeListBuilder_.getMessageOrBuilderList();
17251         } else {
17252           return java.util.Collections.unmodifiableList(rowRangeList_);
17253         }
17254       }
17255       /**
17256        * <code>repeated .RowRange row_range_list = 1;</code>
17257        */
17258       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder addRowRangeListBuilder() {
17259         return getRowRangeListFieldBuilder().addBuilder(
17260             org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance());
17261       }
17262       /**
17263        * <code>repeated .RowRange row_range_list = 1;</code>
17264        */
17265       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder addRowRangeListBuilder(
17266           int index) {
17267         return getRowRangeListFieldBuilder().addBuilder(
17268             index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.getDefaultInstance());
17269       }
17270       /**
17271        * <code>repeated .RowRange row_range_list = 1;</code>
17272        */
17273       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder>
17274            getRowRangeListBuilderList() {
17275         return getRowRangeListFieldBuilder().getBuilderList();
17276       }
17277       private com.google.protobuf.RepeatedFieldBuilder<
17278           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>
17279           getRowRangeListFieldBuilder() {
17280         if (rowRangeListBuilder_ == null) {
17281           rowRangeListBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
17282               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRange.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.RowRangeOrBuilder>(
17283                   rowRangeList_,
17284                   ((bitField0_ & 0x00000001) == 0x00000001),
17285                   getParentForChildren(),
17286                   isClean());
17287           rowRangeList_ = null;
17288         }
17289         return rowRangeListBuilder_;
17290       }
17291 
17292       // @@protoc_insertion_point(builder_scope:MultiRowRangeFilter)
17293     }
17294 
17295     static {
17296       defaultInstance = new MultiRowRangeFilter(true);
17297       defaultInstance.initFields();
17298     }
17299 
17300     // @@protoc_insertion_point(class_scope:MultiRowRangeFilter)
17301   }
17302 
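  // --- Illustrative usage sketch (not emitted by protoc) ---------------------
  // Assembles a MultiRowRangeFilter message from a list of ranges via the
  // generated builder API above; the helper name and parameter are illustrative
  // only and not part of the generated surface.
  private static MultiRowRangeFilter exampleMultiRowRangeFilter(
      java.util.List<RowRange> ranges) {
    return MultiRowRangeFilter.newBuilder()
        .addAllRowRangeList(ranges)
        .build();
  }
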
17303   private static com.google.protobuf.Descriptors.Descriptor
17304     internal_static_Filter_descriptor;
17305   private static
17306     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17307       internal_static_Filter_fieldAccessorTable;
17308   private static com.google.protobuf.Descriptors.Descriptor
17309     internal_static_ColumnCountGetFilter_descriptor;
17310   private static
17311     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17312       internal_static_ColumnCountGetFilter_fieldAccessorTable;
17313   private static com.google.protobuf.Descriptors.Descriptor
17314     internal_static_ColumnPaginationFilter_descriptor;
17315   private static
17316     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17317       internal_static_ColumnPaginationFilter_fieldAccessorTable;
17318   private static com.google.protobuf.Descriptors.Descriptor
17319     internal_static_ColumnPrefixFilter_descriptor;
17320   private static
17321     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17322       internal_static_ColumnPrefixFilter_fieldAccessorTable;
17323   private static com.google.protobuf.Descriptors.Descriptor
17324     internal_static_ColumnRangeFilter_descriptor;
17325   private static
17326     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17327       internal_static_ColumnRangeFilter_fieldAccessorTable;
17328   private static com.google.protobuf.Descriptors.Descriptor
17329     internal_static_CompareFilter_descriptor;
17330   private static
17331     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17332       internal_static_CompareFilter_fieldAccessorTable;
17333   private static com.google.protobuf.Descriptors.Descriptor
17334     internal_static_DependentColumnFilter_descriptor;
17335   private static
17336     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17337       internal_static_DependentColumnFilter_fieldAccessorTable;
17338   private static com.google.protobuf.Descriptors.Descriptor
17339     internal_static_FamilyFilter_descriptor;
17340   private static
17341     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17342       internal_static_FamilyFilter_fieldAccessorTable;
17343   private static com.google.protobuf.Descriptors.Descriptor
17344     internal_static_FilterList_descriptor;
17345   private static
17346     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17347       internal_static_FilterList_fieldAccessorTable;
17348   private static com.google.protobuf.Descriptors.Descriptor
17349     internal_static_FilterWrapper_descriptor;
17350   private static
17351     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17352       internal_static_FilterWrapper_fieldAccessorTable;
17353   private static com.google.protobuf.Descriptors.Descriptor
17354     internal_static_FirstKeyOnlyFilter_descriptor;
17355   private static
17356     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17357       internal_static_FirstKeyOnlyFilter_fieldAccessorTable;
17358   private static com.google.protobuf.Descriptors.Descriptor
17359     internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor;
17360   private static
17361     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17362       internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable;
17363   private static com.google.protobuf.Descriptors.Descriptor
17364     internal_static_FuzzyRowFilter_descriptor;
17365   private static
17366     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17367       internal_static_FuzzyRowFilter_fieldAccessorTable;
17368   private static com.google.protobuf.Descriptors.Descriptor
17369     internal_static_InclusiveStopFilter_descriptor;
17370   private static
17371     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17372       internal_static_InclusiveStopFilter_fieldAccessorTable;
17373   private static com.google.protobuf.Descriptors.Descriptor
17374     internal_static_KeyOnlyFilter_descriptor;
17375   private static
17376     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17377       internal_static_KeyOnlyFilter_fieldAccessorTable;
17378   private static com.google.protobuf.Descriptors.Descriptor
17379     internal_static_MultipleColumnPrefixFilter_descriptor;
17380   private static
17381     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17382       internal_static_MultipleColumnPrefixFilter_fieldAccessorTable;
17383   private static com.google.protobuf.Descriptors.Descriptor
17384     internal_static_PageFilter_descriptor;
17385   private static
17386     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17387       internal_static_PageFilter_fieldAccessorTable;
17388   private static com.google.protobuf.Descriptors.Descriptor
17389     internal_static_PrefixFilter_descriptor;
17390   private static
17391     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17392       internal_static_PrefixFilter_fieldAccessorTable;
17393   private static com.google.protobuf.Descriptors.Descriptor
17394     internal_static_QualifierFilter_descriptor;
17395   private static
17396     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17397       internal_static_QualifierFilter_fieldAccessorTable;
17398   private static com.google.protobuf.Descriptors.Descriptor
17399     internal_static_RandomRowFilter_descriptor;
17400   private static
17401     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17402       internal_static_RandomRowFilter_fieldAccessorTable;
17403   private static com.google.protobuf.Descriptors.Descriptor
17404     internal_static_RowFilter_descriptor;
17405   private static
17406     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17407       internal_static_RowFilter_fieldAccessorTable;
17408   private static com.google.protobuf.Descriptors.Descriptor
17409     internal_static_SingleColumnValueExcludeFilter_descriptor;
17410   private static
17411     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17412       internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable;
17413   private static com.google.protobuf.Descriptors.Descriptor
17414     internal_static_SingleColumnValueFilter_descriptor;
17415   private static
17416     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17417       internal_static_SingleColumnValueFilter_fieldAccessorTable;
17418   private static com.google.protobuf.Descriptors.Descriptor
17419     internal_static_SkipFilter_descriptor;
17420   private static
17421     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17422       internal_static_SkipFilter_fieldAccessorTable;
17423   private static com.google.protobuf.Descriptors.Descriptor
17424     internal_static_TimestampsFilter_descriptor;
17425   private static
17426     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17427       internal_static_TimestampsFilter_fieldAccessorTable;
17428   private static com.google.protobuf.Descriptors.Descriptor
17429     internal_static_ValueFilter_descriptor;
17430   private static
17431     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17432       internal_static_ValueFilter_fieldAccessorTable;
17433   private static com.google.protobuf.Descriptors.Descriptor
17434     internal_static_WhileMatchFilter_descriptor;
17435   private static
17436     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17437       internal_static_WhileMatchFilter_fieldAccessorTable;
17438   private static com.google.protobuf.Descriptors.Descriptor
17439     internal_static_FilterAllFilter_descriptor;
17440   private static
17441     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17442       internal_static_FilterAllFilter_fieldAccessorTable;
17443   private static com.google.protobuf.Descriptors.Descriptor
17444     internal_static_RowRange_descriptor;
17445   private static
17446     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17447       internal_static_RowRange_fieldAccessorTable;
17448   private static com.google.protobuf.Descriptors.Descriptor
17449     internal_static_MultiRowRangeFilter_descriptor;
17450   private static
17451     com.google.protobuf.GeneratedMessage.FieldAccessorTable
17452       internal_static_MultiRowRangeFilter_fieldAccessorTable;
17453 
17454   public static com.google.protobuf.Descriptors.FileDescriptor
17455       getDescriptor() {
17456     return descriptor;
17457   }
17458   private static com.google.protobuf.Descriptors.FileDescriptor
17459       descriptor;
17460   static {
17461     java.lang.String[] descriptorData = {
17462       "\n\014Filter.proto\032\013HBase.proto\032\020Comparator." +
17463       "proto\"1\n\006Filter\022\014\n\004name\030\001 \002(\t\022\031\n\021seriali" +
17464       "zed_filter\030\002 \001(\014\"%\n\024ColumnCountGetFilter" +
17465       "\022\r\n\005limit\030\001 \002(\005\"N\n\026ColumnPaginationFilte" +
17466       "r\022\r\n\005limit\030\001 \002(\005\022\016\n\006offset\030\002 \001(\005\022\025\n\rcolu" +
17467       "mn_offset\030\003 \001(\014\"$\n\022ColumnPrefixFilter\022\016\n" +
17468       "\006prefix\030\001 \002(\014\"w\n\021ColumnRangeFilter\022\022\n\nmi" +
17469       "n_column\030\001 \001(\014\022\034\n\024min_column_inclusive\030\002" +
17470       " \001(\010\022\022\n\nmax_column\030\003 \001(\014\022\034\n\024max_column_i" +
17471       "nclusive\030\004 \001(\010\"R\n\rCompareFilter\022 \n\ncompa",
17472       "re_op\030\001 \002(\0162\014.CompareType\022\037\n\ncomparator\030" +
17473       "\002 \001(\0132\013.Comparator\"\217\001\n\025DependentColumnFi" +
17474       "lter\022&\n\016compare_filter\030\001 \002(\0132\016.CompareFi" +
17475       "lter\022\025\n\rcolumn_family\030\002 \001(\014\022\030\n\020column_qu" +
17476       "alifier\030\003 \001(\014\022\035\n\025drop_dependent_column\030\004" +
17477       " \001(\010\"6\n\014FamilyFilter\022&\n\016compare_filter\030\001" +
17478       " \002(\0132\016.CompareFilter\"\200\001\n\nFilterList\022&\n\010o" +
17479       "perator\030\001 \002(\0162\024.FilterList.Operator\022\030\n\007f" +
17480       "ilters\030\002 \003(\0132\007.Filter\"0\n\010Operator\022\021\n\rMUS" +
17481       "T_PASS_ALL\020\001\022\021\n\rMUST_PASS_ONE\020\002\"(\n\rFilte",
17482       "rWrapper\022\027\n\006filter\030\001 \002(\0132\007.Filter\"\024\n\022Fir" +
17483       "stKeyOnlyFilter\";\n%FirstKeyValueMatching" +
17484       "QualifiersFilter\022\022\n\nqualifiers\030\001 \003(\014\":\n\016" +
17485       "FuzzyRowFilter\022(\n\017fuzzy_keys_data\030\001 \003(\0132" +
17486       "\017.BytesBytesPair\"+\n\023InclusiveStopFilter\022" +
17487       "\024\n\014stop_row_key\030\001 \001(\014\"#\n\rKeyOnlyFilter\022\022" +
17488       "\n\nlen_as_val\030\001 \002(\010\"5\n\032MultipleColumnPref" +
17489       "ixFilter\022\027\n\017sorted_prefixes\030\001 \003(\014\"\037\n\nPag" +
17490       "eFilter\022\021\n\tpage_size\030\001 \002(\003\"\036\n\014PrefixFilt" +
17491       "er\022\016\n\006prefix\030\001 \001(\014\"9\n\017QualifierFilter\022&\n",
17492       "\016compare_filter\030\001 \002(\0132\016.CompareFilter\"!\n" +
17493       "\017RandomRowFilter\022\016\n\006chance\030\001 \002(\002\"3\n\tRowF" +
17494       "ilter\022&\n\016compare_filter\030\001 \002(\0132\016.CompareF" +
17495       "ilter\"^\n\036SingleColumnValueExcludeFilter\022" +
17496       "<\n\032single_column_value_filter\030\001 \002(\0132\030.Si" +
17497       "ngleColumnValueFilter\"\305\001\n\027SingleColumnVa" +
17498       "lueFilter\022\025\n\rcolumn_family\030\001 \001(\014\022\030\n\020colu" +
17499       "mn_qualifier\030\002 \001(\014\022 \n\ncompare_op\030\003 \002(\0162\014" +
17500       ".CompareType\022\037\n\ncomparator\030\004 \002(\0132\013.Compa" +
17501       "rator\022\031\n\021filter_if_missing\030\005 \001(\010\022\033\n\023late",
17502       "st_version_only\030\006 \001(\010\"%\n\nSkipFilter\022\027\n\006f" +
17503       "ilter\030\001 \002(\0132\007.Filter\"*\n\020TimestampsFilter" +
17504       "\022\026\n\ntimestamps\030\001 \003(\003B\002\020\001\"5\n\013ValueFilter\022" +
17505       "&\n\016compare_filter\030\001 \002(\0132\016.CompareFilter\"" +
17506       "+\n\020WhileMatchFilter\022\027\n\006filter\030\001 \002(\0132\007.Fi" +
17507       "lter\"\021\n\017FilterAllFilter\"h\n\010RowRange\022\021\n\ts" +
17508       "tart_row\030\001 \001(\014\022\033\n\023start_row_inclusive\030\002 " +
17509       "\001(\010\022\020\n\010stop_row\030\003 \001(\014\022\032\n\022stop_row_inclus" +
17510       "ive\030\004 \001(\010\"8\n\023MultiRowRangeFilter\022!\n\016row_" +
17511       "range_list\030\001 \003(\0132\t.RowRangeBB\n*org.apach",
17512       "e.hadoop.hbase.protobuf.generatedB\014Filte" +
17513       "rProtosH\001\210\001\001\240\001\001"
17514     };
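    // descriptorData is the serialized FileDescriptorProto for Filter.proto
    // (including its imports of HBase.proto and Comparator.proto); the assigner
    // below binds each message descriptor to its FieldAccessorTable when this
    // class is initialized.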
17515     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
17516       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
17517         public com.google.protobuf.ExtensionRegistry assignDescriptors(
17518             com.google.protobuf.Descriptors.FileDescriptor root) {
17519           descriptor = root;
17520           internal_static_Filter_descriptor =
17521             getDescriptor().getMessageTypes().get(0);
17522           internal_static_Filter_fieldAccessorTable = new
17523             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17524               internal_static_Filter_descriptor,
17525               new java.lang.String[] { "Name", "SerializedFilter", });
17526           internal_static_ColumnCountGetFilter_descriptor =
17527             getDescriptor().getMessageTypes().get(1);
17528           internal_static_ColumnCountGetFilter_fieldAccessorTable = new
17529             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17530               internal_static_ColumnCountGetFilter_descriptor,
17531               new java.lang.String[] { "Limit", });
17532           internal_static_ColumnPaginationFilter_descriptor =
17533             getDescriptor().getMessageTypes().get(2);
17534           internal_static_ColumnPaginationFilter_fieldAccessorTable = new
17535             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17536               internal_static_ColumnPaginationFilter_descriptor,
17537               new java.lang.String[] { "Limit", "Offset", "ColumnOffset", });
17538           internal_static_ColumnPrefixFilter_descriptor =
17539             getDescriptor().getMessageTypes().get(3);
17540           internal_static_ColumnPrefixFilter_fieldAccessorTable = new
17541             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17542               internal_static_ColumnPrefixFilter_descriptor,
17543               new java.lang.String[] { "Prefix", });
17544           internal_static_ColumnRangeFilter_descriptor =
17545             getDescriptor().getMessageTypes().get(4);
17546           internal_static_ColumnRangeFilter_fieldAccessorTable = new
17547             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17548               internal_static_ColumnRangeFilter_descriptor,
17549               new java.lang.String[] { "MinColumn", "MinColumnInclusive", "MaxColumn", "MaxColumnInclusive", });
17550           internal_static_CompareFilter_descriptor =
17551             getDescriptor().getMessageTypes().get(5);
17552           internal_static_CompareFilter_fieldAccessorTable = new
17553             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17554               internal_static_CompareFilter_descriptor,
17555               new java.lang.String[] { "CompareOp", "Comparator", });
17556           internal_static_DependentColumnFilter_descriptor =
17557             getDescriptor().getMessageTypes().get(6);
17558           internal_static_DependentColumnFilter_fieldAccessorTable = new
17559             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17560               internal_static_DependentColumnFilter_descriptor,
17561               new java.lang.String[] { "CompareFilter", "ColumnFamily", "ColumnQualifier", "DropDependentColumn", });
17562           internal_static_FamilyFilter_descriptor =
17563             getDescriptor().getMessageTypes().get(7);
17564           internal_static_FamilyFilter_fieldAccessorTable = new
17565             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17566               internal_static_FamilyFilter_descriptor,
17567               new java.lang.String[] { "CompareFilter", });
17568           internal_static_FilterList_descriptor =
17569             getDescriptor().getMessageTypes().get(8);
17570           internal_static_FilterList_fieldAccessorTable = new
17571             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17572               internal_static_FilterList_descriptor,
17573               new java.lang.String[] { "Operator", "Filters", });
17574           internal_static_FilterWrapper_descriptor =
17575             getDescriptor().getMessageTypes().get(9);
17576           internal_static_FilterWrapper_fieldAccessorTable = new
17577             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17578               internal_static_FilterWrapper_descriptor,
17579               new java.lang.String[] { "Filter", });
17580           internal_static_FirstKeyOnlyFilter_descriptor =
17581             getDescriptor().getMessageTypes().get(10);
17582           internal_static_FirstKeyOnlyFilter_fieldAccessorTable = new
17583             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17584               internal_static_FirstKeyOnlyFilter_descriptor,
17585               new java.lang.String[] { });
17586           internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor =
17587             getDescriptor().getMessageTypes().get(11);
17588           internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable = new
17589             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17590               internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor,
17591               new java.lang.String[] { "Qualifiers", });
17592           internal_static_FuzzyRowFilter_descriptor =
17593             getDescriptor().getMessageTypes().get(12);
17594           internal_static_FuzzyRowFilter_fieldAccessorTable = new
17595             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
17596               internal_static_FuzzyRowFilter_descriptor,
17597               new java.lang.String[] { "FuzzyKeysData", });
          internal_static_InclusiveStopFilter_descriptor =
            getDescriptor().getMessageTypes().get(13);
          internal_static_InclusiveStopFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_InclusiveStopFilter_descriptor,
              new java.lang.String[] { "StopRowKey", });
          internal_static_KeyOnlyFilter_descriptor =
            getDescriptor().getMessageTypes().get(14);
          internal_static_KeyOnlyFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_KeyOnlyFilter_descriptor,
              new java.lang.String[] { "LenAsVal", });
          internal_static_MultipleColumnPrefixFilter_descriptor =
            getDescriptor().getMessageTypes().get(15);
          internal_static_MultipleColumnPrefixFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MultipleColumnPrefixFilter_descriptor,
              new java.lang.String[] { "SortedPrefixes", });
          internal_static_PageFilter_descriptor =
            getDescriptor().getMessageTypes().get(16);
          internal_static_PageFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_PageFilter_descriptor,
              new java.lang.String[] { "PageSize", });
          internal_static_PrefixFilter_descriptor =
            getDescriptor().getMessageTypes().get(17);
          internal_static_PrefixFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_PrefixFilter_descriptor,
              new java.lang.String[] { "Prefix", });
          internal_static_QualifierFilter_descriptor =
            getDescriptor().getMessageTypes().get(18);
          internal_static_QualifierFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_QualifierFilter_descriptor,
              new java.lang.String[] { "CompareFilter", });
          internal_static_RandomRowFilter_descriptor =
            getDescriptor().getMessageTypes().get(19);
          internal_static_RandomRowFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RandomRowFilter_descriptor,
              new java.lang.String[] { "Chance", });
          internal_static_RowFilter_descriptor =
            getDescriptor().getMessageTypes().get(20);
          internal_static_RowFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RowFilter_descriptor,
              new java.lang.String[] { "CompareFilter", });
          internal_static_SingleColumnValueExcludeFilter_descriptor =
            getDescriptor().getMessageTypes().get(21);
          internal_static_SingleColumnValueExcludeFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_SingleColumnValueExcludeFilter_descriptor,
              new java.lang.String[] { "SingleColumnValueFilter", });
          internal_static_SingleColumnValueFilter_descriptor =
            getDescriptor().getMessageTypes().get(22);
          internal_static_SingleColumnValueFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_SingleColumnValueFilter_descriptor,
              new java.lang.String[] { "ColumnFamily", "ColumnQualifier", "CompareOp", "Comparator", "FilterIfMissing", "LatestVersionOnly", });
          internal_static_SkipFilter_descriptor =
            getDescriptor().getMessageTypes().get(23);
          internal_static_SkipFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_SkipFilter_descriptor,
              new java.lang.String[] { "Filter", });
          internal_static_TimestampsFilter_descriptor =
            getDescriptor().getMessageTypes().get(24);
          internal_static_TimestampsFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_TimestampsFilter_descriptor,
              new java.lang.String[] { "Timestamps", });
          internal_static_ValueFilter_descriptor =
            getDescriptor().getMessageTypes().get(25);
          internal_static_ValueFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ValueFilter_descriptor,
              new java.lang.String[] { "CompareFilter", });
          internal_static_WhileMatchFilter_descriptor =
            getDescriptor().getMessageTypes().get(26);
          internal_static_WhileMatchFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_WhileMatchFilter_descriptor,
              new java.lang.String[] { "Filter", });
          internal_static_FilterAllFilter_descriptor =
            getDescriptor().getMessageTypes().get(27);
          internal_static_FilterAllFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_FilterAllFilter_descriptor,
              new java.lang.String[] { });
          internal_static_RowRange_descriptor =
            getDescriptor().getMessageTypes().get(28);
          internal_static_RowRange_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RowRange_descriptor,
              new java.lang.String[] { "StartRow", "StartRowInclusive", "StopRow", "StopRowInclusive", });
          internal_static_MultiRowRangeFilter_descriptor =
            getDescriptor().getMessageTypes().get(29);
          internal_static_MultiRowRangeFilter_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MultiRowRangeFilter_descriptor,
              new java.lang.String[] { "RowRangeList", });
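          // Filter.proto declares no protobuf extensions, so there is no
          // ExtensionRegistry to hand back from this assigner; returning null
          // is the usual generated-code behaviour in that case.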
          return null;
        }
      };
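    // The file descriptor is rebuilt at class-initialization time from the
    // serialized descriptorData above; its imports are resolved against the
    // already-loaded HBaseProtos and ComparatorProtos descriptors, and the
    // assigner callback then populates the per-message descriptor and
    // FieldAccessorTable fields used by the generated message classes.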
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(),
        }, assigner);
  }

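  // Illustrative sketch (not part of the generated code): client code usually
  // wraps an org.apache.hadoop.hbase.filter.Filter in this protobuf message by
  // recording the filter's class name together with the filter's own serialized
  // bytes. The "clientFilter" variable below is hypothetical; the builder calls
  // (newBuilder, setName, setSerializedFilter, build) are the methods generated
  // for the Filter message declared in Filter.proto.
  //
  //   FilterProtos.Filter pbFilter = FilterProtos.Filter.newBuilder()
  //       .setName(clientFilter.getClass().getName())
  //       .setSerializedFilter(
  //           com.google.protobuf.ByteString.copyFrom(clientFilter.toByteArray()))
  //       .build();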
  // @@protoc_insertion_point(outer_class_scope)
}