// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: Client.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class ClientProtos {
  private ClientProtos() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protobuf enum {@code Consistency}
   *
   * <pre>
   **
   * Consistency defines the expected consistency level for an operation.
   * </pre>
   */
  public enum Consistency
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>STRONG = 0;</code>
     */
    STRONG(0, 0),
    /**
     * <code>TIMELINE = 1;</code>
     */
    TIMELINE(1, 1),
    ;

    /**
     * <code>STRONG = 0;</code>
     */
    public static final int STRONG_VALUE = 0;
    /**
     * <code>TIMELINE = 1;</code>
     */
    public static final int TIMELINE_VALUE = 1;


    public final int getNumber() { return value; }

    public static Consistency valueOf(int value) {
      switch (value) {
        case 0: return STRONG;
        case 1: return TIMELINE;
        default: return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Consistency>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<Consistency>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<Consistency>() {
            public Consistency findValueByNumber(int number) {
              return Consistency.valueOf(number);
            }
          };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final Consistency[] VALUES = values();

    public static Consistency valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int index;
    private final int value;

    private Consistency(int index, int value) {
      this.index = index;
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:Consistency)
  }
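  // Illustrative sketch (editor-added comment, not generator output): converting between the
  // wire value and the Consistency enum using the generated accessors shown above.
  //
  //   ClientProtos.Consistency c = ClientProtos.Consistency.valueOf(1);       // TIMELINE
  //   int wire = ClientProtos.Consistency.TIMELINE.getNumber();               // 1
  //   ClientProtos.Consistency unknown = ClientProtos.Consistency.valueOf(99); // null for unknown values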

  public interface AuthorizationsOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated string label = 1;
    /**
     * <code>repeated string label = 1;</code>
     */
    java.util.List<java.lang.String>
    getLabelList();
    /**
     * <code>repeated string label = 1;</code>
     */
    int getLabelCount();
    /**
     * <code>repeated string label = 1;</code>
     */
    java.lang.String getLabel(int index);
    /**
     * <code>repeated string label = 1;</code>
     */
    com.google.protobuf.ByteString
        getLabelBytes(int index);
  }
  /**
   * Protobuf type {@code Authorizations}
   *
   * <pre>
   **
   * The protocol buffer version of Authorizations.
   * </pre>
   */
  public static final class Authorizations extends
      com.google.protobuf.GeneratedMessage
      implements AuthorizationsOrBuilder {
    // Use Authorizations.newBuilder() to construct.
    private Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Authorizations defaultInstance;
    public static Authorizations getDefaultInstance() {
      return defaultInstance;
    }

    public Authorizations getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private Authorizations(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                label_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              label_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
    }

    public static com.google.protobuf.Parser<Authorizations> PARSER =
        new com.google.protobuf.AbstractParser<Authorizations>() {
      public Authorizations parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Authorizations(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Authorizations> getParserForType() {
      return PARSER;
    }

    // repeated string label = 1;
    public static final int LABEL_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList label_;
    /**
     * <code>repeated string label = 1;</code>
     */
    public java.util.List<java.lang.String>
        getLabelList() {
      return label_;
    }
    /**
     * <code>repeated string label = 1;</code>
     */
    public int getLabelCount() {
      return label_.size();
    }
    /**
     * <code>repeated string label = 1;</code>
     */
    public java.lang.String getLabel(int index) {
      return label_.get(index);
    }
    /**
     * <code>repeated string label = 1;</code>
     */
    public com.google.protobuf.ByteString
        getLabelBytes(int index) {
      return label_.getByteString(index);
    }

    private void initFields() {
      label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < label_.size(); i++) {
        output.writeBytes(1, label_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < label_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(label_.getByteString(i));
        }
        size += dataSize;
        size += 1 * getLabelList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) obj;

      boolean result = true;
      result = result && getLabelList()
          .equals(other.getLabelList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getLabelCount() > 0) {
        hash = (37 * hash) + LABEL_FIELD_NUMBER;
        hash = (53 * hash) + getLabelList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code Authorizations}
     *
     * <pre>
     **
     * The protocol buffer version of Authorizations.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.AuthorizationsOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          label_ = new com.google.protobuf.UnmodifiableLazyStringList(
              label_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.label_ = label_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this;
        if (!other.label_.isEmpty()) {
          if (label_.isEmpty()) {
            label_ = other.label_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureLabelIsMutable();
            label_.addAll(other.label_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated string label = 1;
      private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureLabelIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          label_ = new com.google.protobuf.LazyStringArrayList(label_);
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public java.util.List<java.lang.String>
          getLabelList() {
        return java.util.Collections.unmodifiableList(label_);
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public int getLabelCount() {
        return label_.size();
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public java.lang.String getLabel(int index) {
        return label_.get(index);
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public com.google.protobuf.ByteString
          getLabelBytes(int index) {
        return label_.getByteString(index);
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder setLabel(
          int index, java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureLabelIsMutable();
        label_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder addLabel(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureLabelIsMutable();
        label_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder addAllLabel(
          java.lang.Iterable<java.lang.String> values) {
        ensureLabelIsMutable();
        super.addAll(values, label_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder clearLabel() {
        label_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string label = 1;</code>
       */
      public Builder addLabelBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureLabelIsMutable();
        label_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:Authorizations)
    }

    static {
      defaultInstance = new Authorizations(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:Authorizations)
  }
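  // Illustrative sketch (editor-added comment, not generator output): building and
  // round-tripping an Authorizations message through the generated builder and parser.
  // The label value is a placeholder.
  //
  //   ClientProtos.Authorizations auths = ClientProtos.Authorizations.newBuilder()
  //       .addLabel("example-label")
  //       .build();
  //   byte[] bytes = auths.toByteArray();
  //   ClientProtos.Authorizations copy = ClientProtos.Authorizations.parseFrom(bytes);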

  public interface CellVisibilityOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string expression = 1;
    /**
     * <code>required string expression = 1;</code>
     */
    boolean hasExpression();
    /**
     * <code>required string expression = 1;</code>
     */
    java.lang.String getExpression();
    /**
     * <code>required string expression = 1;</code>
     */
    com.google.protobuf.ByteString
        getExpressionBytes();
  }
  /**
   * Protobuf type {@code CellVisibility}
   *
   * <pre>
   **
   * The protocol buffer version of CellVisibility.
   * </pre>
   */
  public static final class CellVisibility extends
      com.google.protobuf.GeneratedMessage
      implements CellVisibilityOrBuilder {
    // Use CellVisibility.newBuilder() to construct.
    private CellVisibility(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final CellVisibility defaultInstance;
    public static CellVisibility getDefaultInstance() {
      return defaultInstance;
    }

    public CellVisibility getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private CellVisibility(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              expression_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
    }

    public static com.google.protobuf.Parser<CellVisibility> PARSER =
        new com.google.protobuf.AbstractParser<CellVisibility>() {
      public CellVisibility parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CellVisibility(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<CellVisibility> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // required string expression = 1;
    public static final int EXPRESSION_FIELD_NUMBER = 1;
    private java.lang.Object expression_;
    /**
     * <code>required string expression = 1;</code>
     */
    public boolean hasExpression() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string expression = 1;</code>
     */
    public java.lang.String getExpression() {
      java.lang.Object ref = expression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          expression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string expression = 1;</code>
     */
    public com.google.protobuf.ByteString
        getExpressionBytes() {
      java.lang.Object ref = expression_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        expression_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    private void initFields() {
      expression_ = "";
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasExpression()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getExpressionBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getExpressionBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) obj;

      boolean result = true;
      result = result && (hasExpression() == other.hasExpression());
      if (hasExpression()) {
        result = result && getExpression()
            .equals(other.getExpression());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasExpression()) {
        hash = (37 * hash) + EXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getExpression().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code CellVisibility}
     *
     * <pre>
     **
     * The protocol buffer version of CellVisibility.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibilityOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        expression_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.expression_ = expression_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this;
        if (other.hasExpression()) {
          bitField0_ |= 0x00000001;
          expression_ = other.expression_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasExpression()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string expression = 1;
      private java.lang.Object expression_ = "";
      /**
       * <code>required string expression = 1;</code>
       */
      public boolean hasExpression() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string expression = 1;</code>
       */
      public java.lang.String getExpression() {
        java.lang.Object ref = expression_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          expression_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string expression = 1;</code>
       */
      public com.google.protobuf.ByteString
          getExpressionBytes() {
        java.lang.Object ref = expression_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          expression_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string expression = 1;</code>
       */
      public Builder setExpression(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        expression_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string expression = 1;</code>
       */
      public Builder clearExpression() {
        bitField0_ = (bitField0_ & ~0x00000001);
        expression_ = getDefaultInstance().getExpression();
        onChanged();
        return this;
      }
      /**
       * <code>required string expression = 1;</code>
       */
      public Builder setExpressionBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        expression_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:CellVisibility)
    }

    static {
      defaultInstance = new CellVisibility(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:CellVisibility)
  }
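  // Illustrative sketch (editor-added comment, not generator output): CellVisibility declares
  // `expression` as a required field, so build() throws UninitializedMessageException when it
  // is unset. The expression string below is a placeholder.
  //
  //   ClientProtos.CellVisibility vis = ClientProtos.CellVisibility.newBuilder()
  //       .setExpression("(A&B)|C")
  //       .build();
  //   boolean ok = vis.isInitialized(); // true once expression is set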
1166 
1167   public interface ColumnOrBuilder
1168       extends com.google.protobuf.MessageOrBuilder {
1169 
1170     // required bytes family = 1;
1171     /**
1172      * <code>required bytes family = 1;</code>
1173      */
hasFamily()1174     boolean hasFamily();
1175     /**
1176      * <code>required bytes family = 1;</code>
1177      */
getFamily()1178     com.google.protobuf.ByteString getFamily();
1179 
1180     // repeated bytes qualifier = 2;
1181     /**
1182      * <code>repeated bytes qualifier = 2;</code>
1183      */
getQualifierList()1184     java.util.List<com.google.protobuf.ByteString> getQualifierList();
1185     /**
1186      * <code>repeated bytes qualifier = 2;</code>
1187      */
getQualifierCount()1188     int getQualifierCount();
1189     /**
1190      * <code>repeated bytes qualifier = 2;</code>
1191      */
getQualifier(int index)1192     com.google.protobuf.ByteString getQualifier(int index);
1193   }
1194   /**
1195    * Protobuf type {@code Column}
1196    *
1197    * <pre>
1198    **
1199    * Container for a list of column qualifier names of a family.
1200    * </pre>
1201    */
1202   public static final class Column extends
1203       com.google.protobuf.GeneratedMessage
1204       implements ColumnOrBuilder {
1205     // Use Column.newBuilder() to construct.
Column(com.google.protobuf.GeneratedMessage.Builder<?> builder)1206     private Column(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1207       super(builder);
1208       this.unknownFields = builder.getUnknownFields();
1209     }
Column(boolean noInit)1210     private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1211 
1212     private static final Column defaultInstance;
getDefaultInstance()1213     public static Column getDefaultInstance() {
1214       return defaultInstance;
1215     }
1216 
getDefaultInstanceForType()1217     public Column getDefaultInstanceForType() {
1218       return defaultInstance;
1219     }
1220 
1221     private final com.google.protobuf.UnknownFieldSet unknownFields;
1222     @java.lang.Override
1223     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()1224         getUnknownFields() {
1225       return this.unknownFields;
1226     }
Column( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1227     private Column(
1228         com.google.protobuf.CodedInputStream input,
1229         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1230         throws com.google.protobuf.InvalidProtocolBufferException {
1231       initFields();
1232       int mutable_bitField0_ = 0;
1233       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1234           com.google.protobuf.UnknownFieldSet.newBuilder();
1235       try {
1236         boolean done = false;
1237         while (!done) {
1238           int tag = input.readTag();
1239           switch (tag) {
1240             case 0:
1241               done = true;
1242               break;
1243             default: {
1244               if (!parseUnknownField(input, unknownFields,
1245                                      extensionRegistry, tag)) {
1246                 done = true;
1247               }
1248               break;
1249             }
1250             case 10: {
1251               bitField0_ |= 0x00000001;
1252               family_ = input.readBytes();
1253               break;
1254             }
1255             case 18: {
1256               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
1257                 qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
1258                 mutable_bitField0_ |= 0x00000002;
1259               }
1260               qualifier_.add(input.readBytes());
1261               break;
1262             }
1263           }
1264         }
1265       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1266         throw e.setUnfinishedMessage(this);
1267       } catch (java.io.IOException e) {
1268         throw new com.google.protobuf.InvalidProtocolBufferException(
1269             e.getMessage()).setUnfinishedMessage(this);
1270       } finally {
1271         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
1272           qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
1273         }
1274         this.unknownFields = unknownFields.build();
1275         makeExtensionsImmutable();
1276       }
1277     }
1278     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()1279         getDescriptor() {
1280       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor;
1281     }
1282 
1283     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()1284         internalGetFieldAccessorTable() {
1285       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable
1286           .ensureFieldAccessorsInitialized(
1287               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
1288     }
1289 
1290     public static com.google.protobuf.Parser<Column> PARSER =
1291         new com.google.protobuf.AbstractParser<Column>() {
1292       public Column parsePartialFrom(
1293           com.google.protobuf.CodedInputStream input,
1294           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1295           throws com.google.protobuf.InvalidProtocolBufferException {
1296         return new Column(input, extensionRegistry);
1297       }
1298     };
1299 
1300     @java.lang.Override
getParserForType()1301     public com.google.protobuf.Parser<Column> getParserForType() {
1302       return PARSER;
1303     }
1304 
1305     private int bitField0_;
1306     // required bytes family = 1;
1307     public static final int FAMILY_FIELD_NUMBER = 1;
1308     private com.google.protobuf.ByteString family_;
1309     /**
1310      * <code>required bytes family = 1;</code>
1311      */
hasFamily()1312     public boolean hasFamily() {
1313       return ((bitField0_ & 0x00000001) == 0x00000001);
1314     }
1315     /**
1316      * <code>required bytes family = 1;</code>
1317      */
1318     public com.google.protobuf.ByteString getFamily() {
1319       return family_;
1320     }
1321 
1322     // repeated bytes qualifier = 2;
1323     public static final int QUALIFIER_FIELD_NUMBER = 2;
1324     private java.util.List<com.google.protobuf.ByteString> qualifier_;
1325     /**
1326      * <code>repeated bytes qualifier = 2;</code>
1327      */
1328     public java.util.List<com.google.protobuf.ByteString>
1329         getQualifierList() {
1330       return qualifier_;
1331     }
1332     /**
1333      * <code>repeated bytes qualifier = 2;</code>
1334      */
1335     public int getQualifierCount() {
1336       return qualifier_.size();
1337     }
1338     /**
1339      * <code>repeated bytes qualifier = 2;</code>
1340      */
1341     public com.google.protobuf.ByteString getQualifier(int index) {
1342       return qualifier_.get(index);
1343     }
1344 
1345     private void initFields() {
1346       family_ = com.google.protobuf.ByteString.EMPTY;
1347       qualifier_ = java.util.Collections.emptyList();
1348     }
1349     private byte memoizedIsInitialized = -1;
1350     public final boolean isInitialized() {
1351       byte isInitialized = memoizedIsInitialized;
1352       if (isInitialized != -1) return isInitialized == 1;
1353 
1354       if (!hasFamily()) {
1355         memoizedIsInitialized = 0;
1356         return false;
1357       }
1358       memoizedIsInitialized = 1;
1359       return true;
1360     }
1361 
1362     public void writeTo(com.google.protobuf.CodedOutputStream output)
1363                         throws java.io.IOException {
1364       getSerializedSize();
1365       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1366         output.writeBytes(1, family_);
1367       }
1368       for (int i = 0; i < qualifier_.size(); i++) {
1369         output.writeBytes(2, qualifier_.get(i));
1370       }
1371       getUnknownFields().writeTo(output);
1372     }
1373 
1374     private int memoizedSerializedSize = -1;
1375     public int getSerializedSize() {
1376       int size = memoizedSerializedSize;
1377       if (size != -1) return size;
1378 
1379       size = 0;
1380       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1381         size += com.google.protobuf.CodedOutputStream
1382           .computeBytesSize(1, family_);
1383       }
1384       {
1385         int dataSize = 0;
1386         for (int i = 0; i < qualifier_.size(); i++) {
1387           dataSize += com.google.protobuf.CodedOutputStream
1388             .computeBytesSizeNoTag(qualifier_.get(i));
1389         }
1390         size += dataSize;
1391         size += 1 * getQualifierList().size();
1392       }
1393       size += getUnknownFields().getSerializedSize();
1394       memoizedSerializedSize = size;
1395       return size;
1396     }
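    // Editorial note (added by the editor, not protoc output): for the repeated
    // "qualifier" field the loop above sums only the payload sizes
    // (computeBytesSizeNoTag); the extra "1 * getQualifierList().size()" term adds
    // one tag byte per element, because tag 18 (field 2, wire type 2) encodes as a
    // single varint byte.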
1397 
1398     private static final long serialVersionUID = 0L;
1399     @java.lang.Override
1400     protected java.lang.Object writeReplace()
1401         throws java.io.ObjectStreamException {
1402       return super.writeReplace();
1403     }
1404 
1405     @java.lang.Override
1406     public boolean equals(final java.lang.Object obj) {
1407       if (obj == this) {
1408        return true;
1409       }
1410       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) {
1411         return super.equals(obj);
1412       }
1413       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj;
1414 
1415       boolean result = true;
1416       result = result && (hasFamily() == other.hasFamily());
1417       if (hasFamily()) {
1418         result = result && getFamily()
1419             .equals(other.getFamily());
1420       }
1421       result = result && getQualifierList()
1422           .equals(other.getQualifierList());
1423       result = result &&
1424           getUnknownFields().equals(other.getUnknownFields());
1425       return result;
1426     }
1427 
1428     private int memoizedHashCode = 0;
1429     @java.lang.Override
1430     public int hashCode() {
1431       if (memoizedHashCode != 0) {
1432         return memoizedHashCode;
1433       }
1434       int hash = 41;
1435       hash = (19 * hash) + getDescriptorForType().hashCode();
1436       if (hasFamily()) {
1437         hash = (37 * hash) + FAMILY_FIELD_NUMBER;
1438         hash = (53 * hash) + getFamily().hashCode();
1439       }
1440       if (getQualifierCount() > 0) {
1441         hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
1442         hash = (53 * hash) + getQualifierList().hashCode();
1443       }
1444       hash = (29 * hash) + getUnknownFields().hashCode();
1445       memoizedHashCode = hash;
1446       return hash;
1447     }
1448 
1449     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1450         com.google.protobuf.ByteString data)
1451         throws com.google.protobuf.InvalidProtocolBufferException {
1452       return PARSER.parseFrom(data);
1453     }
1454     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1455         com.google.protobuf.ByteString data,
1456         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1457         throws com.google.protobuf.InvalidProtocolBufferException {
1458       return PARSER.parseFrom(data, extensionRegistry);
1459     }
1460     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data)
1461         throws com.google.protobuf.InvalidProtocolBufferException {
1462       return PARSER.parseFrom(data);
1463     }
1464     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1465         byte[] data,
1466         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1467         throws com.google.protobuf.InvalidProtocolBufferException {
1468       return PARSER.parseFrom(data, extensionRegistry);
1469     }
1470     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream input)
1471         throws java.io.IOException {
1472       return PARSER.parseFrom(input);
1473     }
1474     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1475         java.io.InputStream input,
1476         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1477         throws java.io.IOException {
1478       return PARSER.parseFrom(input, extensionRegistry);
1479     }
1480     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input)
1481         throws java.io.IOException {
1482       return PARSER.parseDelimitedFrom(input);
1483     }
1484     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(
1485         java.io.InputStream input,
1486         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1487         throws java.io.IOException {
1488       return PARSER.parseDelimitedFrom(input, extensionRegistry);
1489     }
1490     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1491         com.google.protobuf.CodedInputStream input)
1492         throws java.io.IOException {
1493       return PARSER.parseFrom(input);
1494     }
1495     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(
1496         com.google.protobuf.CodedInputStream input,
1497         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1498         throws java.io.IOException {
1499       return PARSER.parseFrom(input, extensionRegistry);
1500     }
1501 
1502     public static Builder newBuilder() { return Builder.create(); }
1503     public Builder newBuilderForType() { return newBuilder(); }
1504     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) {
1505       return newBuilder().mergeFrom(prototype);
1506     }
1507     public Builder toBuilder() { return newBuilder(this); }
1508 
1509     @java.lang.Override
1510     protected Builder newBuilderForType(
1511         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1512       Builder builder = new Builder(parent);
1513       return builder;
1514     }
1515     /**
1516      * Protobuf type {@code Column}
1517      *
1518      * <pre>
1519      **
1520      * Container for a list of column qualifier names of a family.
1521      * </pre>
1522      */
1523     public static final class Builder extends
1524         com.google.protobuf.GeneratedMessage.Builder<Builder>
1525        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder {
1526       public static final com.google.protobuf.Descriptors.Descriptor
1527           getDescriptor() {
1528         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor;
1529       }
1530 
1531       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
1532           internalGetFieldAccessorTable() {
1533         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable
1534             .ensureFieldAccessorsInitialized(
1535                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class);
1536       }
1537 
1538       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder()
1539       private Builder() {
1540         maybeForceBuilderInitialization();
1541       }
1542 
1543       private Builder(
1544           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
1545         super(parent);
1546         maybeForceBuilderInitialization();
1547       }
1548       private void maybeForceBuilderInitialization() {
1549         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
1550         }
1551       }
1552       private static Builder create() {
1553         return new Builder();
1554       }
1555 
1556       public Builder clear() {
1557         super.clear();
1558         family_ = com.google.protobuf.ByteString.EMPTY;
1559         bitField0_ = (bitField0_ & ~0x00000001);
1560         qualifier_ = java.util.Collections.emptyList();
1561         bitField0_ = (bitField0_ & ~0x00000002);
1562         return this;
1563       }
1564 
1565       public Builder clone() {
1566         return create().mergeFrom(buildPartial());
1567       }
1568 
1569       public com.google.protobuf.Descriptors.Descriptor
1570           getDescriptorForType() {
1571         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor;
1572       }
1573 
1574       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() {
1575         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance();
1576       }
1577 
1578       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() {
1579         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial();
1580         if (!result.isInitialized()) {
1581           throw newUninitializedMessageException(result);
1582         }
1583         return result;
1584       }
1585 
1586       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() {
1587         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this);
1588         int from_bitField0_ = bitField0_;
1589         int to_bitField0_ = 0;
1590         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
1591           to_bitField0_ |= 0x00000001;
1592         }
1593         result.family_ = family_;
1594         if (((bitField0_ & 0x00000002) == 0x00000002)) {
1595           qualifier_ = java.util.Collections.unmodifiableList(qualifier_);
1596           bitField0_ = (bitField0_ & ~0x00000002);
1597         }
1598         result.qualifier_ = qualifier_;
1599         result.bitField0_ = to_bitField0_;
1600         onBuilt();
1601         return result;
1602       }
1603 
1604       public Builder mergeFrom(com.google.protobuf.Message other) {
1605         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) {
1606           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other);
1607         } else {
1608           super.mergeFrom(other);
1609           return this;
1610         }
1611       }
1612 
1613       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) {
1614         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this;
1615         if (other.hasFamily()) {
1616           setFamily(other.getFamily());
1617         }
1618         if (!other.qualifier_.isEmpty()) {
1619           if (qualifier_.isEmpty()) {
1620             qualifier_ = other.qualifier_;
1621             bitField0_ = (bitField0_ & ~0x00000002);
1622           } else {
1623             ensureQualifierIsMutable();
1624             qualifier_.addAll(other.qualifier_);
1625           }
1626           onChanged();
1627         }
1628         this.mergeUnknownFields(other.getUnknownFields());
1629         return this;
1630       }
1631 
1632       public final boolean isInitialized() {
1633         if (!hasFamily()) {
1634 
1635           return false;
1636         }
1637         return true;
1638       }
1639 
1640       public Builder mergeFrom(
1641           com.google.protobuf.CodedInputStream input,
1642           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1643           throws java.io.IOException {
1644         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parsedMessage = null;
1645         try {
1646           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1647         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1648           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage();
1649           throw e;
1650         } finally {
1651           if (parsedMessage != null) {
1652             mergeFrom(parsedMessage);
1653           }
1654         }
1655         return this;
1656       }
1657       private int bitField0_;
1658 
1659       // required bytes family = 1;
1660       private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
1661       /**
1662        * <code>required bytes family = 1;</code>
1663        */
1664       public boolean hasFamily() {
1665         return ((bitField0_ & 0x00000001) == 0x00000001);
1666       }
1667       /**
1668        * <code>required bytes family = 1;</code>
1669        */
1670       public com.google.protobuf.ByteString getFamily() {
1671         return family_;
1672       }
1673       /**
1674        * <code>required bytes family = 1;</code>
1675        */
1676       public Builder setFamily(com.google.protobuf.ByteString value) {
1677         if (value == null) {
1678     throw new NullPointerException();
1679   }
1680   bitField0_ |= 0x00000001;
1681         family_ = value;
1682         onChanged();
1683         return this;
1684       }
1685       /**
1686        * <code>required bytes family = 1;</code>
1687        */
1688       public Builder clearFamily() {
1689         bitField0_ = (bitField0_ & ~0x00000001);
1690         family_ = getDefaultInstance().getFamily();
1691         onChanged();
1692         return this;
1693       }
1694 
1695       // repeated bytes qualifier = 2;
1696       private java.util.List<com.google.protobuf.ByteString> qualifier_ = java.util.Collections.emptyList();
1697       private void ensureQualifierIsMutable() {
1698         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
1699           qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifier_);
1700           bitField0_ |= 0x00000002;
1701          }
1702       }
1703       /**
1704        * <code>repeated bytes qualifier = 2;</code>
1705        */
1706       public java.util.List<com.google.protobuf.ByteString>
1707           getQualifierList() {
1708         return java.util.Collections.unmodifiableList(qualifier_);
1709       }
1710       /**
1711        * <code>repeated bytes qualifier = 2;</code>
1712        */
1713       public int getQualifierCount() {
1714         return qualifier_.size();
1715       }
1716       /**
1717        * <code>repeated bytes qualifier = 2;</code>
1718        */
1719       public com.google.protobuf.ByteString getQualifier(int index) {
1720         return qualifier_.get(index);
1721       }
1722       /**
1723        * <code>repeated bytes qualifier = 2;</code>
1724        */
1725       public Builder setQualifier(
1726           int index, com.google.protobuf.ByteString value) {
1727         if (value == null) {
1728     throw new NullPointerException();
1729   }
1730   ensureQualifierIsMutable();
1731         qualifier_.set(index, value);
1732         onChanged();
1733         return this;
1734       }
1735       /**
1736        * <code>repeated bytes qualifier = 2;</code>
1737        */
1738       public Builder addQualifier(com.google.protobuf.ByteString value) {
1739         if (value == null) {
1740     throw new NullPointerException();
1741   }
1742   ensureQualifierIsMutable();
1743         qualifier_.add(value);
1744         onChanged();
1745         return this;
1746       }
1747       /**
1748        * <code>repeated bytes qualifier = 2;</code>
1749        */
1750       public Builder addAllQualifier(
1751           java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
1752         ensureQualifierIsMutable();
1753         super.addAll(values, qualifier_);
1754         onChanged();
1755         return this;
1756       }
1757       /**
1758        * <code>repeated bytes qualifier = 2;</code>
1759        */
1760       public Builder clearQualifier() {
1761         qualifier_ = java.util.Collections.emptyList();
1762         bitField0_ = (bitField0_ & ~0x00000002);
1763         onChanged();
1764         return this;
1765       }
1766 
1767       // @@protoc_insertion_point(builder_scope:Column)
1768     }
1769 
1770     static {
1771       defaultInstance = new Column(true);
1772       defaultInstance.initFields();
1773     }
1774 
1775     // @@protoc_insertion_point(class_scope:Column)
1776   }
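  // Editorial example (an illustrative sketch added by the editor, not part of the
  // generated file): building and round-tripping a Column with the Builder and
  // parser defined above. The literals "cf" and "q1" are placeholders.
  //
  //   ClientProtos.Column col = ClientProtos.Column.newBuilder()
  //       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))    // required field
  //       .addQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
  //       .build();                        // throws if the required "family" is unset
  //   byte[] bytes = col.toByteArray();
  //   ClientProtos.Column roundTripped = ClientProtos.Column.parseFrom(bytes);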
1777 
1778   public interface GetOrBuilder
1779       extends com.google.protobuf.MessageOrBuilder {
1780 
1781     // required bytes row = 1;
1782     /**
1783      * <code>required bytes row = 1;</code>
1784      */
1785     boolean hasRow();
1786     /**
1787      * <code>required bytes row = 1;</code>
1788      */
1789     com.google.protobuf.ByteString getRow();
1790 
1791     // repeated .Column column = 2;
1792     /**
1793      * <code>repeated .Column column = 2;</code>
1794      */
1795     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>
1796         getColumnList();
1797     /**
1798      * <code>repeated .Column column = 2;</code>
1799      */
1800     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
1801     /**
1802      * <code>repeated .Column column = 2;</code>
1803      */
1804     int getColumnCount();
1805     /**
1806      * <code>repeated .Column column = 2;</code>
1807      */
1808     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
1809         getColumnOrBuilderList();
1810     /**
1811      * <code>repeated .Column column = 2;</code>
1812      */
1813     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
1814         int index);
1815 
1816     // repeated .NameBytesPair attribute = 3;
1817     /**
1818      * <code>repeated .NameBytesPair attribute = 3;</code>
1819      */
1820     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
1821         getAttributeList();
1822     /**
1823      * <code>repeated .NameBytesPair attribute = 3;</code>
1824      */
1825     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
1826     /**
1827      * <code>repeated .NameBytesPair attribute = 3;</code>
1828      */
1829     int getAttributeCount();
1830     /**
1831      * <code>repeated .NameBytesPair attribute = 3;</code>
1832      */
1833     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
1834         getAttributeOrBuilderList();
1835     /**
1836      * <code>repeated .NameBytesPair attribute = 3;</code>
1837      */
1838     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
1839         int index);
1840 
1841     // optional .Filter filter = 4;
1842     /**
1843      * <code>optional .Filter filter = 4;</code>
1844      */
1845     boolean hasFilter();
1846     /**
1847      * <code>optional .Filter filter = 4;</code>
1848      */
1849     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
1850     /**
1851      * <code>optional .Filter filter = 4;</code>
1852      */
1853     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
1854 
1855     // optional .TimeRange time_range = 5;
1856     /**
1857      * <code>optional .TimeRange time_range = 5;</code>
1858      */
1859     boolean hasTimeRange();
1860     /**
1861      * <code>optional .TimeRange time_range = 5;</code>
1862      */
1863     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
1864     /**
1865      * <code>optional .TimeRange time_range = 5;</code>
1866      */
1867     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
1868 
1869     // optional uint32 max_versions = 6 [default = 1];
1870     /**
1871      * <code>optional uint32 max_versions = 6 [default = 1];</code>
1872      */
1873     boolean hasMaxVersions();
1874     /**
1875      * <code>optional uint32 max_versions = 6 [default = 1];</code>
1876      */
1877     int getMaxVersions();
1878 
1879     // optional bool cache_blocks = 7 [default = true];
1880     /**
1881      * <code>optional bool cache_blocks = 7 [default = true];</code>
1882      */
1883     boolean hasCacheBlocks();
1884     /**
1885      * <code>optional bool cache_blocks = 7 [default = true];</code>
1886      */
1887     boolean getCacheBlocks();
1888 
1889     // optional uint32 store_limit = 8;
1890     /**
1891      * <code>optional uint32 store_limit = 8;</code>
1892      */
1893     boolean hasStoreLimit();
1894     /**
1895      * <code>optional uint32 store_limit = 8;</code>
1896      */
1897     int getStoreLimit();
1898 
1899     // optional uint32 store_offset = 9;
1900     /**
1901      * <code>optional uint32 store_offset = 9;</code>
1902      */
1903     boolean hasStoreOffset();
1904     /**
1905      * <code>optional uint32 store_offset = 9;</code>
1906      */
1907     int getStoreOffset();
1908 
1909     // optional bool existence_only = 10 [default = false];
1910     /**
1911      * <code>optional bool existence_only = 10 [default = false];</code>
1912      *
1913      * <pre>
1914      * The result isn't asked for, just check for
1915      * the existence.
1916      * </pre>
1917      */
1918     boolean hasExistenceOnly();
1919     /**
1920      * <code>optional bool existence_only = 10 [default = false];</code>
1921      *
1922      * <pre>
1923      * The result isn't asked for, just check for
1924      * the existence.
1925      * </pre>
1926      */
1927     boolean getExistenceOnly();
1928 
1929     // optional bool closest_row_before = 11 [default = false];
1930     /**
1931      * <code>optional bool closest_row_before = 11 [default = false];</code>
1932      *
1933      * <pre>
1934      * If the row to get doesn't exist, return the
1935      * closest row before.
1936      * </pre>
1937      */
1938     boolean hasClosestRowBefore();
1939     /**
1940      * <code>optional bool closest_row_before = 11 [default = false];</code>
1941      *
1942      * <pre>
1943      * If the row to get doesn't exist, return the
1944      * closest row before.
1945      * </pre>
1946      */
1947     boolean getClosestRowBefore();
1948 
1949     // optional .Consistency consistency = 12 [default = STRONG];
1950     /**
1951      * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
1952      */
1953     boolean hasConsistency();
1954     /**
1955      * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
1956      */
1957     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
1958 
1959     // repeated .ColumnFamilyTimeRange cf_time_range = 13;
1960     /**
1961      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
1962      */
1963     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>
1964         getCfTimeRangeList();
1965     /**
1966      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
1967      */
1968     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index);
1969     /**
1970      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
1971      */
1972     int getCfTimeRangeCount();
1973     /**
1974      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
1975      */
1976     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
1977         getCfTimeRangeOrBuilderList();
1978     /**
1979      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
1980      */
1981     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
1982         int index);
1983   }
1984   /**
1985    * Protobuf type {@code Get}
1986    *
1987    * <pre>
1988    **
1989    * The protocol buffer version of Get.
1990    * Unless existence_only is specified, return all the requested data
1991    * for the row that matches exactly, or the one that immediately
1992    * precedes it if closest_row_before is specified.
1993    * </pre>
1994    */
1995   public static final class Get extends
1996       com.google.protobuf.GeneratedMessage
1997       implements GetOrBuilder {
1998     // Use Get.newBuilder() to construct.
1999     private Get(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
2000       super(builder);
2001       this.unknownFields = builder.getUnknownFields();
2002     }
2003     private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
2004 
2005     private static final Get defaultInstance;
2006     public static Get getDefaultInstance() {
2007       return defaultInstance;
2008     }
2009 
2010     public Get getDefaultInstanceForType() {
2011       return defaultInstance;
2012     }
2013 
2014     private final com.google.protobuf.UnknownFieldSet unknownFields;
2015     @java.lang.Override
2016     public final com.google.protobuf.UnknownFieldSet
2017         getUnknownFields() {
2018       return this.unknownFields;
2019     }
2020     private Get(
2021         com.google.protobuf.CodedInputStream input,
2022         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2023         throws com.google.protobuf.InvalidProtocolBufferException {
2024       initFields();
2025       int mutable_bitField0_ = 0;
2026       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
2027           com.google.protobuf.UnknownFieldSet.newBuilder();
2028       try {
2029         boolean done = false;
2030         while (!done) {
2031           int tag = input.readTag();
2032           switch (tag) {
2033             case 0:
2034               done = true;
2035               break;
2036             default: {
2037               if (!parseUnknownField(input, unknownFields,
2038                                      extensionRegistry, tag)) {
2039                 done = true;
2040               }
2041               break;
2042             }
2043             case 10: {
2044               bitField0_ |= 0x00000001;
2045               row_ = input.readBytes();
2046               break;
2047             }
2048             case 18: {
2049               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
2050                 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
2051                 mutable_bitField0_ |= 0x00000002;
2052               }
2053               column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
2054               break;
2055             }
2056             case 26: {
2057               if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
2058                 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
2059                 mutable_bitField0_ |= 0x00000004;
2060               }
2061               attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
2062               break;
2063             }
2064             case 34: {
2065               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
2066               if (((bitField0_ & 0x00000002) == 0x00000002)) {
2067                 subBuilder = filter_.toBuilder();
2068               }
2069               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
2070               if (subBuilder != null) {
2071                 subBuilder.mergeFrom(filter_);
2072                 filter_ = subBuilder.buildPartial();
2073               }
2074               bitField0_ |= 0x00000002;
2075               break;
2076             }
2077             case 42: {
2078               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
2079               if (((bitField0_ & 0x00000004) == 0x00000004)) {
2080                 subBuilder = timeRange_.toBuilder();
2081               }
2082               timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
2083               if (subBuilder != null) {
2084                 subBuilder.mergeFrom(timeRange_);
2085                 timeRange_ = subBuilder.buildPartial();
2086               }
2087               bitField0_ |= 0x00000004;
2088               break;
2089             }
2090             case 48: {
2091               bitField0_ |= 0x00000008;
2092               maxVersions_ = input.readUInt32();
2093               break;
2094             }
2095             case 56: {
2096               bitField0_ |= 0x00000010;
2097               cacheBlocks_ = input.readBool();
2098               break;
2099             }
2100             case 64: {
2101               bitField0_ |= 0x00000020;
2102               storeLimit_ = input.readUInt32();
2103               break;
2104             }
2105             case 72: {
2106               bitField0_ |= 0x00000040;
2107               storeOffset_ = input.readUInt32();
2108               break;
2109             }
2110             case 80: {
2111               bitField0_ |= 0x00000080;
2112               existenceOnly_ = input.readBool();
2113               break;
2114             }
2115             case 88: {
2116               bitField0_ |= 0x00000100;
2117               closestRowBefore_ = input.readBool();
2118               break;
2119             }
2120             case 96: {
2121               int rawValue = input.readEnum();
2122               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
2123               if (value == null) {
2124                 unknownFields.mergeVarintField(12, rawValue);
2125               } else {
2126                 bitField0_ |= 0x00000200;
2127                 consistency_ = value;
2128               }
2129               break;
2130             }
2131             case 106: {
2132               if (!((mutable_bitField0_ & 0x00001000) == 0x00001000)) {
2133                 cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>();
2134                 mutable_bitField0_ |= 0x00001000;
2135               }
2136               cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry));
2137               break;
2138             }
2139           }
2140         }
2141       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
2142         throw e.setUnfinishedMessage(this);
2143       } catch (java.io.IOException e) {
2144         throw new com.google.protobuf.InvalidProtocolBufferException(
2145             e.getMessage()).setUnfinishedMessage(this);
2146       } finally {
2147         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
2148           column_ = java.util.Collections.unmodifiableList(column_);
2149         }
2150         if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
2151           attribute_ = java.util.Collections.unmodifiableList(attribute_);
2152         }
2153         if (((mutable_bitField0_ & 0x00001000) == 0x00001000)) {
2154           cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
2155         }
2156         this.unknownFields = unknownFields.build();
2157         makeExtensionsImmutable();
2158       }
2159     }
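    // Editorial note (added by the editor, not protoc output): the tag scheme is the
    // same as in Column above; for example case 96 is field 12 (consistency) read as
    // a varint ((12 << 3) | 0) and case 106 is field 13 (cf_time_range) read as a
    // length-delimited message ((13 << 3) | 2). Enum numbers this version does not
    // recognize are preserved in unknownFields rather than dropped.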
2160     public static final com.google.protobuf.Descriptors.Descriptor
2161         getDescriptor() {
2162       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor;
2163     }
2164 
2165     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
2166         internalGetFieldAccessorTable() {
2167       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable
2168           .ensureFieldAccessorsInitialized(
2169               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
2170     }
2171 
2172     public static com.google.protobuf.Parser<Get> PARSER =
2173         new com.google.protobuf.AbstractParser<Get>() {
2174       public Get parsePartialFrom(
2175           com.google.protobuf.CodedInputStream input,
2176           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2177           throws com.google.protobuf.InvalidProtocolBufferException {
2178         return new Get(input, extensionRegistry);
2179       }
2180     };
2181 
2182     @java.lang.Override
2183     public com.google.protobuf.Parser<Get> getParserForType() {
2184       return PARSER;
2185     }
2186 
2187     private int bitField0_;
2188     // required bytes row = 1;
2189     public static final int ROW_FIELD_NUMBER = 1;
2190     private com.google.protobuf.ByteString row_;
2191     /**
2192      * <code>required bytes row = 1;</code>
2193      */
2194     public boolean hasRow() {
2195       return ((bitField0_ & 0x00000001) == 0x00000001);
2196     }
2197     /**
2198      * <code>required bytes row = 1;</code>
2199      */
2200     public com.google.protobuf.ByteString getRow() {
2201       return row_;
2202     }
2203 
2204     // repeated .Column column = 2;
2205     public static final int COLUMN_FIELD_NUMBER = 2;
2206     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
2207     /**
2208      * <code>repeated .Column column = 2;</code>
2209      */
2210     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
2211       return column_;
2212     }
2213     /**
2214      * <code>repeated .Column column = 2;</code>
2215      */
2216     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
2217         getColumnOrBuilderList() {
2218       return column_;
2219     }
2220     /**
2221      * <code>repeated .Column column = 2;</code>
2222      */
2223     public int getColumnCount() {
2224       return column_.size();
2225     }
2226     /**
2227      * <code>repeated .Column column = 2;</code>
2228      */
2229     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
2230       return column_.get(index);
2231     }
2232     /**
2233      * <code>repeated .Column column = 2;</code>
2234      */
2235     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
2236         int index) {
2237       return column_.get(index);
2238     }
2239 
2240     // repeated .NameBytesPair attribute = 3;
2241     public static final int ATTRIBUTE_FIELD_NUMBER = 3;
2242     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
2243     /**
2244      * <code>repeated .NameBytesPair attribute = 3;</code>
2245      */
2246     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
2247       return attribute_;
2248     }
2249     /**
2250      * <code>repeated .NameBytesPair attribute = 3;</code>
2251      */
2252     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
2253         getAttributeOrBuilderList() {
2254       return attribute_;
2255     }
2256     /**
2257      * <code>repeated .NameBytesPair attribute = 3;</code>
2258      */
2259     public int getAttributeCount() {
2260       return attribute_.size();
2261     }
2262     /**
2263      * <code>repeated .NameBytesPair attribute = 3;</code>
2264      */
2265     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
2266       return attribute_.get(index);
2267     }
2268     /**
2269      * <code>repeated .NameBytesPair attribute = 3;</code>
2270      */
2271     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
2272         int index) {
2273       return attribute_.get(index);
2274     }
2275 
2276     // optional .Filter filter = 4;
2277     public static final int FILTER_FIELD_NUMBER = 4;
2278     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
2279     /**
2280      * <code>optional .Filter filter = 4;</code>
2281      */
2282     public boolean hasFilter() {
2283       return ((bitField0_ & 0x00000002) == 0x00000002);
2284     }
2285     /**
2286      * <code>optional .Filter filter = 4;</code>
2287      */
2288     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
2289       return filter_;
2290     }
2291     /**
2292      * <code>optional .Filter filter = 4;</code>
2293      */
2294     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
2295       return filter_;
2296     }
2297 
2298     // optional .TimeRange time_range = 5;
2299     public static final int TIME_RANGE_FIELD_NUMBER = 5;
2300     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
2301     /**
2302      * <code>optional .TimeRange time_range = 5;</code>
2303      */
2304     public boolean hasTimeRange() {
2305       return ((bitField0_ & 0x00000004) == 0x00000004);
2306     }
2307     /**
2308      * <code>optional .TimeRange time_range = 5;</code>
2309      */
2310     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
2311       return timeRange_;
2312     }
2313     /**
2314      * <code>optional .TimeRange time_range = 5;</code>
2315      */
2316     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
2317       return timeRange_;
2318     }
2319 
2320     // optional uint32 max_versions = 6 [default = 1];
2321     public static final int MAX_VERSIONS_FIELD_NUMBER = 6;
2322     private int maxVersions_;
2323     /**
2324      * <code>optional uint32 max_versions = 6 [default = 1];</code>
2325      */
2326     public boolean hasMaxVersions() {
2327       return ((bitField0_ & 0x00000008) == 0x00000008);
2328     }
2329     /**
2330      * <code>optional uint32 max_versions = 6 [default = 1];</code>
2331      */
2332     public int getMaxVersions() {
2333       return maxVersions_;
2334     }
2335 
2336     // optional bool cache_blocks = 7 [default = true];
2337     public static final int CACHE_BLOCKS_FIELD_NUMBER = 7;
2338     private boolean cacheBlocks_;
2339     /**
2340      * <code>optional bool cache_blocks = 7 [default = true];</code>
2341      */
2342     public boolean hasCacheBlocks() {
2343       return ((bitField0_ & 0x00000010) == 0x00000010);
2344     }
2345     /**
2346      * <code>optional bool cache_blocks = 7 [default = true];</code>
2347      */
2348     public boolean getCacheBlocks() {
2349       return cacheBlocks_;
2350     }
2351 
2352     // optional uint32 store_limit = 8;
2353     public static final int STORE_LIMIT_FIELD_NUMBER = 8;
2354     private int storeLimit_;
2355     /**
2356      * <code>optional uint32 store_limit = 8;</code>
2357      */
2358     public boolean hasStoreLimit() {
2359       return ((bitField0_ & 0x00000020) == 0x00000020);
2360     }
2361     /**
2362      * <code>optional uint32 store_limit = 8;</code>
2363      */
2364     public int getStoreLimit() {
2365       return storeLimit_;
2366     }
2367 
2368     // optional uint32 store_offset = 9;
2369     public static final int STORE_OFFSET_FIELD_NUMBER = 9;
2370     private int storeOffset_;
2371     /**
2372      * <code>optional uint32 store_offset = 9;</code>
2373      */
2374     public boolean hasStoreOffset() {
2375       return ((bitField0_ & 0x00000040) == 0x00000040);
2376     }
2377     /**
2378      * <code>optional uint32 store_offset = 9;</code>
2379      */
2380     public int getStoreOffset() {
2381       return storeOffset_;
2382     }
2383 
2384     // optional bool existence_only = 10 [default = false];
2385     public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10;
2386     private boolean existenceOnly_;
2387     /**
2388      * <code>optional bool existence_only = 10 [default = false];</code>
2389      *
2390      * <pre>
2391      * The result isn't asked for, just check for
2392      * the existence.
2393      * </pre>
2394      */
2395     public boolean hasExistenceOnly() {
2396       return ((bitField0_ & 0x00000080) == 0x00000080);
2397     }
2398     /**
2399      * <code>optional bool existence_only = 10 [default = false];</code>
2400      *
2401      * <pre>
2402      * The result isn't asked for, just check for
2403      * the existence.
2404      * </pre>
2405      */
2406     public boolean getExistenceOnly() {
2407       return existenceOnly_;
2408     }
2409 
2410     // optional bool closest_row_before = 11 [default = false];
2411     public static final int CLOSEST_ROW_BEFORE_FIELD_NUMBER = 11;
2412     private boolean closestRowBefore_;
2413     /**
2414      * <code>optional bool closest_row_before = 11 [default = false];</code>
2415      *
2416      * <pre>
2417      * If the row to get doesn't exist, return the
2418      * closest row before.
2419      * </pre>
2420      */
2421     public boolean hasClosestRowBefore() {
2422       return ((bitField0_ & 0x00000100) == 0x00000100);
2423     }
2424     /**
2425      * <code>optional bool closest_row_before = 11 [default = false];</code>
2426      *
2427      * <pre>
2428      * If the row to get doesn't exist, return the
2429      * closest row before.
2430      * </pre>
2431      */
2432     public boolean getClosestRowBefore() {
2433       return closestRowBefore_;
2434     }
2435 
2436     // optional .Consistency consistency = 12 [default = STRONG];
2437     public static final int CONSISTENCY_FIELD_NUMBER = 12;
2438     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
2439     /**
2440      * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
2441      */
2442     public boolean hasConsistency() {
2443       return ((bitField0_ & 0x00000200) == 0x00000200);
2444     }
2445     /**
2446      * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
2447      */
2448     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
2449       return consistency_;
2450     }
2451 
2452     // repeated .ColumnFamilyTimeRange cf_time_range = 13;
2453     public static final int CF_TIME_RANGE_FIELD_NUMBER = 13;
2454     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_;
2455     /**
2456      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
2457      */
2458     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
2459       return cfTimeRange_;
2460     }
2461     /**
2462      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
2463      */
2464     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
2465         getCfTimeRangeOrBuilderList() {
2466       return cfTimeRange_;
2467     }
2468     /**
2469      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
2470      */
2471     public int getCfTimeRangeCount() {
2472       return cfTimeRange_.size();
2473     }
2474     /**
2475      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
2476      */
2477     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
2478       return cfTimeRange_.get(index);
2479     }
2480     /**
2481      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
2482      */
2483     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
2484         int index) {
2485       return cfTimeRange_.get(index);
2486     }
2487 
2488     private void initFields() {
2489       row_ = com.google.protobuf.ByteString.EMPTY;
2490       column_ = java.util.Collections.emptyList();
2491       attribute_ = java.util.Collections.emptyList();
2492       filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
2493       timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
2494       maxVersions_ = 1;
2495       cacheBlocks_ = true;
2496       storeLimit_ = 0;
2497       storeOffset_ = 0;
2498       existenceOnly_ = false;
2499       closestRowBefore_ = false;
2500       consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
2501       cfTimeRange_ = java.util.Collections.emptyList();
2502     }
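    // Editorial example (an illustrative sketch added by the editor, not part of the
    // generated file; it assumes the standard generated Builder setters such as
    // setRow, addColumn and setExistenceOnly defined later in this class): a minimal
    // existence-check Get that keeps the defaults shown above (max_versions = 1,
    // cache_blocks = true, consistency = STRONG). "row1" and "cf" are placeholders.
    //
    //   ClientProtos.Get get = ClientProtos.Get.newBuilder()
    //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))   // required
    //       .addColumn(ClientProtos.Column.newBuilder()
    //           .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf")))
    //       .setExistenceOnly(true)        // only ask whether the row exists
    //       .build();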
2503     private byte memoizedIsInitialized = -1;
2504     public final boolean isInitialized() {
2505       byte isInitialized = memoizedIsInitialized;
2506       if (isInitialized != -1) return isInitialized == 1;
2507 
2508       if (!hasRow()) {
2509         memoizedIsInitialized = 0;
2510         return false;
2511       }
2512       for (int i = 0; i < getColumnCount(); i++) {
2513         if (!getColumn(i).isInitialized()) {
2514           memoizedIsInitialized = 0;
2515           return false;
2516         }
2517       }
2518       for (int i = 0; i < getAttributeCount(); i++) {
2519         if (!getAttribute(i).isInitialized()) {
2520           memoizedIsInitialized = 0;
2521           return false;
2522         }
2523       }
2524       if (hasFilter()) {
2525         if (!getFilter().isInitialized()) {
2526           memoizedIsInitialized = 0;
2527           return false;
2528         }
2529       }
2530       for (int i = 0; i < getCfTimeRangeCount(); i++) {
2531         if (!getCfTimeRange(i).isInitialized()) {
2532           memoizedIsInitialized = 0;
2533           return false;
2534         }
2535       }
2536       memoizedIsInitialized = 1;
2537       return true;
2538     }
2539 
2540     public void writeTo(com.google.protobuf.CodedOutputStream output)
2541                         throws java.io.IOException {
2542       getSerializedSize();
2543       if (((bitField0_ & 0x00000001) == 0x00000001)) {
2544         output.writeBytes(1, row_);
2545       }
2546       for (int i = 0; i < column_.size(); i++) {
2547         output.writeMessage(2, column_.get(i));
2548       }
2549       for (int i = 0; i < attribute_.size(); i++) {
2550         output.writeMessage(3, attribute_.get(i));
2551       }
2552       if (((bitField0_ & 0x00000002) == 0x00000002)) {
2553         output.writeMessage(4, filter_);
2554       }
2555       if (((bitField0_ & 0x00000004) == 0x00000004)) {
2556         output.writeMessage(5, timeRange_);
2557       }
2558       if (((bitField0_ & 0x00000008) == 0x00000008)) {
2559         output.writeUInt32(6, maxVersions_);
2560       }
2561       if (((bitField0_ & 0x00000010) == 0x00000010)) {
2562         output.writeBool(7, cacheBlocks_);
2563       }
2564       if (((bitField0_ & 0x00000020) == 0x00000020)) {
2565         output.writeUInt32(8, storeLimit_);
2566       }
2567       if (((bitField0_ & 0x00000040) == 0x00000040)) {
2568         output.writeUInt32(9, storeOffset_);
2569       }
2570       if (((bitField0_ & 0x00000080) == 0x00000080)) {
2571         output.writeBool(10, existenceOnly_);
2572       }
2573       if (((bitField0_ & 0x00000100) == 0x00000100)) {
2574         output.writeBool(11, closestRowBefore_);
2575       }
2576       if (((bitField0_ & 0x00000200) == 0x00000200)) {
2577         output.writeEnum(12, consistency_.getNumber());
2578       }
2579       for (int i = 0; i < cfTimeRange_.size(); i++) {
2580         output.writeMessage(13, cfTimeRange_.get(i));
2581       }
2582       getUnknownFields().writeTo(output);
2583     }
2584 
2585     private int memoizedSerializedSize = -1;
2586     public int getSerializedSize() {
2587       int size = memoizedSerializedSize;
2588       if (size != -1) return size;
2589 
2590       size = 0;
2591       if (((bitField0_ & 0x00000001) == 0x00000001)) {
2592         size += com.google.protobuf.CodedOutputStream
2593           .computeBytesSize(1, row_);
2594       }
2595       for (int i = 0; i < column_.size(); i++) {
2596         size += com.google.protobuf.CodedOutputStream
2597           .computeMessageSize(2, column_.get(i));
2598       }
2599       for (int i = 0; i < attribute_.size(); i++) {
2600         size += com.google.protobuf.CodedOutputStream
2601           .computeMessageSize(3, attribute_.get(i));
2602       }
2603       if (((bitField0_ & 0x00000002) == 0x00000002)) {
2604         size += com.google.protobuf.CodedOutputStream
2605           .computeMessageSize(4, filter_);
2606       }
2607       if (((bitField0_ & 0x00000004) == 0x00000004)) {
2608         size += com.google.protobuf.CodedOutputStream
2609           .computeMessageSize(5, timeRange_);
2610       }
2611       if (((bitField0_ & 0x00000008) == 0x00000008)) {
2612         size += com.google.protobuf.CodedOutputStream
2613           .computeUInt32Size(6, maxVersions_);
2614       }
2615       if (((bitField0_ & 0x00000010) == 0x00000010)) {
2616         size += com.google.protobuf.CodedOutputStream
2617           .computeBoolSize(7, cacheBlocks_);
2618       }
2619       if (((bitField0_ & 0x00000020) == 0x00000020)) {
2620         size += com.google.protobuf.CodedOutputStream
2621           .computeUInt32Size(8, storeLimit_);
2622       }
2623       if (((bitField0_ & 0x00000040) == 0x00000040)) {
2624         size += com.google.protobuf.CodedOutputStream
2625           .computeUInt32Size(9, storeOffset_);
2626       }
2627       if (((bitField0_ & 0x00000080) == 0x00000080)) {
2628         size += com.google.protobuf.CodedOutputStream
2629           .computeBoolSize(10, existenceOnly_);
2630       }
2631       if (((bitField0_ & 0x00000100) == 0x00000100)) {
2632         size += com.google.protobuf.CodedOutputStream
2633           .computeBoolSize(11, closestRowBefore_);
2634       }
2635       if (((bitField0_ & 0x00000200) == 0x00000200)) {
2636         size += com.google.protobuf.CodedOutputStream
2637           .computeEnumSize(12, consistency_.getNumber());
2638       }
2639       for (int i = 0; i < cfTimeRange_.size(); i++) {
2640         size += com.google.protobuf.CodedOutputStream
2641           .computeMessageSize(13, cfTimeRange_.get(i));
2642       }
2643       size += getUnknownFields().getSerializedSize();
2644       memoizedSerializedSize = size;
2645       return size;
2646     }
2647 
2648     private static final long serialVersionUID = 0L;
2649     @java.lang.Override
    protected java.lang.Object writeReplace()
2651         throws java.io.ObjectStreamException {
2652       return super.writeReplace();
2653     }
2654 
2655     @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
2657       if (obj == this) {
2658        return true;
2659       }
2660       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) {
2661         return super.equals(obj);
2662       }
2663       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj;
2664 
2665       boolean result = true;
2666       result = result && (hasRow() == other.hasRow());
2667       if (hasRow()) {
2668         result = result && getRow()
2669             .equals(other.getRow());
2670       }
2671       result = result && getColumnList()
2672           .equals(other.getColumnList());
2673       result = result && getAttributeList()
2674           .equals(other.getAttributeList());
2675       result = result && (hasFilter() == other.hasFilter());
2676       if (hasFilter()) {
2677         result = result && getFilter()
2678             .equals(other.getFilter());
2679       }
2680       result = result && (hasTimeRange() == other.hasTimeRange());
2681       if (hasTimeRange()) {
2682         result = result && getTimeRange()
2683             .equals(other.getTimeRange());
2684       }
2685       result = result && (hasMaxVersions() == other.hasMaxVersions());
2686       if (hasMaxVersions()) {
2687         result = result && (getMaxVersions()
2688             == other.getMaxVersions());
2689       }
2690       result = result && (hasCacheBlocks() == other.hasCacheBlocks());
2691       if (hasCacheBlocks()) {
2692         result = result && (getCacheBlocks()
2693             == other.getCacheBlocks());
2694       }
2695       result = result && (hasStoreLimit() == other.hasStoreLimit());
2696       if (hasStoreLimit()) {
2697         result = result && (getStoreLimit()
2698             == other.getStoreLimit());
2699       }
2700       result = result && (hasStoreOffset() == other.hasStoreOffset());
2701       if (hasStoreOffset()) {
2702         result = result && (getStoreOffset()
2703             == other.getStoreOffset());
2704       }
2705       result = result && (hasExistenceOnly() == other.hasExistenceOnly());
2706       if (hasExistenceOnly()) {
2707         result = result && (getExistenceOnly()
2708             == other.getExistenceOnly());
2709       }
2710       result = result && (hasClosestRowBefore() == other.hasClosestRowBefore());
2711       if (hasClosestRowBefore()) {
2712         result = result && (getClosestRowBefore()
2713             == other.getClosestRowBefore());
2714       }
2715       result = result && (hasConsistency() == other.hasConsistency());
2716       if (hasConsistency()) {
2717         result = result &&
2718             (getConsistency() == other.getConsistency());
2719       }
2720       result = result && getCfTimeRangeList()
2721           .equals(other.getCfTimeRangeList());
2722       result = result &&
2723           getUnknownFields().equals(other.getUnknownFields());
2724       return result;
2725     }
2726 
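    // Note (added documentation, not protoc output): hashCode() folds each present
    // field into the hash keyed by its protobuf field number (the 37/53 multipliers
    // are protoc's standard mixing scheme) and memoizes the result, which is safe
    // because the message is immutable once built.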
2727     private int memoizedHashCode = 0;
2728     @java.lang.Override
    public int hashCode() {
2730       if (memoizedHashCode != 0) {
2731         return memoizedHashCode;
2732       }
2733       int hash = 41;
2734       hash = (19 * hash) + getDescriptorForType().hashCode();
2735       if (hasRow()) {
2736         hash = (37 * hash) + ROW_FIELD_NUMBER;
2737         hash = (53 * hash) + getRow().hashCode();
2738       }
2739       if (getColumnCount() > 0) {
2740         hash = (37 * hash) + COLUMN_FIELD_NUMBER;
2741         hash = (53 * hash) + getColumnList().hashCode();
2742       }
2743       if (getAttributeCount() > 0) {
2744         hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
2745         hash = (53 * hash) + getAttributeList().hashCode();
2746       }
2747       if (hasFilter()) {
2748         hash = (37 * hash) + FILTER_FIELD_NUMBER;
2749         hash = (53 * hash) + getFilter().hashCode();
2750       }
2751       if (hasTimeRange()) {
2752         hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
2753         hash = (53 * hash) + getTimeRange().hashCode();
2754       }
2755       if (hasMaxVersions()) {
2756         hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
2757         hash = (53 * hash) + getMaxVersions();
2758       }
2759       if (hasCacheBlocks()) {
2760         hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
2761         hash = (53 * hash) + hashBoolean(getCacheBlocks());
2762       }
2763       if (hasStoreLimit()) {
2764         hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
2765         hash = (53 * hash) + getStoreLimit();
2766       }
2767       if (hasStoreOffset()) {
2768         hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
2769         hash = (53 * hash) + getStoreOffset();
2770       }
2771       if (hasExistenceOnly()) {
2772         hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER;
2773         hash = (53 * hash) + hashBoolean(getExistenceOnly());
2774       }
2775       if (hasClosestRowBefore()) {
2776         hash = (37 * hash) + CLOSEST_ROW_BEFORE_FIELD_NUMBER;
2777         hash = (53 * hash) + hashBoolean(getClosestRowBefore());
2778       }
2779       if (hasConsistency()) {
2780         hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
2781         hash = (53 * hash) + hashEnum(getConsistency());
2782       }
2783       if (getCfTimeRangeCount() > 0) {
2784         hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER;
2785         hash = (53 * hash) + getCfTimeRangeList().hashCode();
2786       }
2787       hash = (29 * hash) + getUnknownFields().hashCode();
2788       memoizedHashCode = hash;
2789       return hash;
2790     }
2791 
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2793         com.google.protobuf.ByteString data)
2794         throws com.google.protobuf.InvalidProtocolBufferException {
2795       return PARSER.parseFrom(data);
2796     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2798         com.google.protobuf.ByteString data,
2799         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2800         throws com.google.protobuf.InvalidProtocolBufferException {
2801       return PARSER.parseFrom(data, extensionRegistry);
2802     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data)
2804         throws com.google.protobuf.InvalidProtocolBufferException {
2805       return PARSER.parseFrom(data);
2806     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2808         byte[] data,
2809         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2810         throws com.google.protobuf.InvalidProtocolBufferException {
2811       return PARSER.parseFrom(data, extensionRegistry);
2812     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input)
2814         throws java.io.IOException {
2815       return PARSER.parseFrom(input);
2816     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2818         java.io.InputStream input,
2819         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2820         throws java.io.IOException {
2821       return PARSER.parseFrom(input, extensionRegistry);
2822     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input)
2824         throws java.io.IOException {
2825       return PARSER.parseDelimitedFrom(input);
2826     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(
2828         java.io.InputStream input,
2829         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2830         throws java.io.IOException {
2831       return PARSER.parseDelimitedFrom(input, extensionRegistry);
2832     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2834         com.google.protobuf.CodedInputStream input)
2835         throws java.io.IOException {
2836       return PARSER.parseFrom(input);
2837     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(
2839         com.google.protobuf.CodedInputStream input,
2840         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2841         throws java.io.IOException {
2842       return PARSER.parseFrom(input, extensionRegistry);
2843     }
2844 
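    // Illustrative sketch (added for documentation; not emitted by protoc): the
    // parseFrom overloads above all delegate to PARSER. A typical round trip
    // serializes a built Get and parses it back, as below. The helper name
    // roundTripExample is hypothetical and not part of the generated API.
    private static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get roundTripExample(
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get original)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // toByteString() is inherited from GeneratedMessage; parseFrom(ByteString) is declared above.
      com.google.protobuf.ByteString wire = original.toByteString();
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.parseFrom(wire);
    }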
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
2851 
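    // Illustrative sketch (added for documentation; not emitted by protoc): shows how a
    // caller typically assembles a Get via newBuilder()/build(). The row key literal is
    // hypothetical and the helper name buildExampleGet is not part of the generated API;
    // every setter used here is declared on the Builder below.
    private static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildExampleGet() {
      return newBuilder()
          .setRow(com.google.protobuf.ByteString.copyFromUtf8("example-row"))
          .setMaxVersions(1)
          .setCacheBlocks(true)
          .setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG)
          .build();
    }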
2852     @java.lang.Override
    protected Builder newBuilderForType(
2854         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2855       Builder builder = new Builder(parent);
2856       return builder;
2857     }
2858     /**
2859      * Protobuf type {@code Get}
2860      *
2861      * <pre>
2862      **
2863      * The protocol buffer version of Get.
2864      * Unless existence_only is specified, return all the requested data
2865      * for the row that matches exactly, or the one that immediately
2866      * precedes it if closest_row_before is specified.
2867      * </pre>
2868      */
2869     public static final class Builder extends
2870         com.google.protobuf.GeneratedMessage.Builder<Builder>
2871        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder {
2872       public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
2874         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor;
2875       }
2876 
2877       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
2879         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable
2880             .ensureFieldAccessorsInitialized(
2881                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class);
2882       }
2883 
2884       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
2895         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2896           getColumnFieldBuilder();
2897           getAttributeFieldBuilder();
2898           getFilterFieldBuilder();
2899           getTimeRangeFieldBuilder();
2900           getCfTimeRangeFieldBuilder();
2901         }
2902       }
      private static Builder create() {
2904         return new Builder();
2905       }
2906 
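      // Note (added documentation, not protoc output): clear() below restores every
      // field to its declared .proto default rather than to the Java zero value (for
      // example max_versions resets to 1, cache_blocks to true, and consistency to
      // STRONG) and drops all presence bits in bitField0_.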
      public Builder clear() {
2908         super.clear();
2909         row_ = com.google.protobuf.ByteString.EMPTY;
2910         bitField0_ = (bitField0_ & ~0x00000001);
2911         if (columnBuilder_ == null) {
2912           column_ = java.util.Collections.emptyList();
2913           bitField0_ = (bitField0_ & ~0x00000002);
2914         } else {
2915           columnBuilder_.clear();
2916         }
2917         if (attributeBuilder_ == null) {
2918           attribute_ = java.util.Collections.emptyList();
2919           bitField0_ = (bitField0_ & ~0x00000004);
2920         } else {
2921           attributeBuilder_.clear();
2922         }
2923         if (filterBuilder_ == null) {
2924           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
2925         } else {
2926           filterBuilder_.clear();
2927         }
2928         bitField0_ = (bitField0_ & ~0x00000008);
2929         if (timeRangeBuilder_ == null) {
2930           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
2931         } else {
2932           timeRangeBuilder_.clear();
2933         }
2934         bitField0_ = (bitField0_ & ~0x00000010);
2935         maxVersions_ = 1;
2936         bitField0_ = (bitField0_ & ~0x00000020);
2937         cacheBlocks_ = true;
2938         bitField0_ = (bitField0_ & ~0x00000040);
2939         storeLimit_ = 0;
2940         bitField0_ = (bitField0_ & ~0x00000080);
2941         storeOffset_ = 0;
2942         bitField0_ = (bitField0_ & ~0x00000100);
2943         existenceOnly_ = false;
2944         bitField0_ = (bitField0_ & ~0x00000200);
2945         closestRowBefore_ = false;
2946         bitField0_ = (bitField0_ & ~0x00000400);
2947         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
2948         bitField0_ = (bitField0_ & ~0x00000800);
2949         if (cfTimeRangeBuilder_ == null) {
2950           cfTimeRange_ = java.util.Collections.emptyList();
2951           bitField0_ = (bitField0_ & ~0x00001000);
2952         } else {
2953           cfTimeRangeBuilder_.clear();
2954         }
2955         return this;
2956       }
2957 
      public Builder clone() {
2959         return create().mergeFrom(buildPartial());
2960       }
2961 
2962       public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
2964         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor;
2965       }
2966 
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() {
2968         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
2969       }
2970 
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() {
2972         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial();
2973         if (!result.isInitialized()) {
2974           throw newUninitializedMessageException(result);
2975         }
2976         return result;
2977       }
2978 
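      // Note (added documentation, not protoc output): buildPartial() copies each set
      // field into the new message, translates the Builder's presence bits in bitField0_
      // into the message's own bit layout, and freezes the repeated fields (column,
      // attribute, cf_time_range) as unmodifiable lists. Unlike build(), it does not
      // reject a message whose required 'row' field is missing.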
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() {
2980         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this);
2981         int from_bitField0_ = bitField0_;
2982         int to_bitField0_ = 0;
2983         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
2984           to_bitField0_ |= 0x00000001;
2985         }
2986         result.row_ = row_;
2987         if (columnBuilder_ == null) {
2988           if (((bitField0_ & 0x00000002) == 0x00000002)) {
2989             column_ = java.util.Collections.unmodifiableList(column_);
2990             bitField0_ = (bitField0_ & ~0x00000002);
2991           }
2992           result.column_ = column_;
2993         } else {
2994           result.column_ = columnBuilder_.build();
2995         }
2996         if (attributeBuilder_ == null) {
2997           if (((bitField0_ & 0x00000004) == 0x00000004)) {
2998             attribute_ = java.util.Collections.unmodifiableList(attribute_);
2999             bitField0_ = (bitField0_ & ~0x00000004);
3000           }
3001           result.attribute_ = attribute_;
3002         } else {
3003           result.attribute_ = attributeBuilder_.build();
3004         }
3005         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
3006           to_bitField0_ |= 0x00000002;
3007         }
3008         if (filterBuilder_ == null) {
3009           result.filter_ = filter_;
3010         } else {
3011           result.filter_ = filterBuilder_.build();
3012         }
3013         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
3014           to_bitField0_ |= 0x00000004;
3015         }
3016         if (timeRangeBuilder_ == null) {
3017           result.timeRange_ = timeRange_;
3018         } else {
3019           result.timeRange_ = timeRangeBuilder_.build();
3020         }
3021         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
3022           to_bitField0_ |= 0x00000008;
3023         }
3024         result.maxVersions_ = maxVersions_;
3025         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
3026           to_bitField0_ |= 0x00000010;
3027         }
3028         result.cacheBlocks_ = cacheBlocks_;
3029         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
3030           to_bitField0_ |= 0x00000020;
3031         }
3032         result.storeLimit_ = storeLimit_;
3033         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
3034           to_bitField0_ |= 0x00000040;
3035         }
3036         result.storeOffset_ = storeOffset_;
3037         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
3038           to_bitField0_ |= 0x00000080;
3039         }
3040         result.existenceOnly_ = existenceOnly_;
3041         if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
3042           to_bitField0_ |= 0x00000100;
3043         }
3044         result.closestRowBefore_ = closestRowBefore_;
3045         if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
3046           to_bitField0_ |= 0x00000200;
3047         }
3048         result.consistency_ = consistency_;
3049         if (cfTimeRangeBuilder_ == null) {
3050           if (((bitField0_ & 0x00001000) == 0x00001000)) {
3051             cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
3052             bitField0_ = (bitField0_ & ~0x00001000);
3053           }
3054           result.cfTimeRange_ = cfTimeRange_;
3055         } else {
3056           result.cfTimeRange_ = cfTimeRangeBuilder_.build();
3057         }
3058         result.bitField0_ = to_bitField0_;
3059         onBuilt();
3060         return result;
3061       }
3062 
      public Builder mergeFrom(com.google.protobuf.Message other) {
3064         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) {
3065           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other);
3066         } else {
3067           super.mergeFrom(other);
3068           return this;
3069         }
3070       }
3071 
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) {
3073         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this;
3074         if (other.hasRow()) {
3075           setRow(other.getRow());
3076         }
3077         if (columnBuilder_ == null) {
3078           if (!other.column_.isEmpty()) {
3079             if (column_.isEmpty()) {
3080               column_ = other.column_;
3081               bitField0_ = (bitField0_ & ~0x00000002);
3082             } else {
3083               ensureColumnIsMutable();
3084               column_.addAll(other.column_);
3085             }
3086             onChanged();
3087           }
3088         } else {
3089           if (!other.column_.isEmpty()) {
3090             if (columnBuilder_.isEmpty()) {
3091               columnBuilder_.dispose();
3092               columnBuilder_ = null;
3093               column_ = other.column_;
3094               bitField0_ = (bitField0_ & ~0x00000002);
3095               columnBuilder_ =
3096                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
3097                    getColumnFieldBuilder() : null;
3098             } else {
3099               columnBuilder_.addAllMessages(other.column_);
3100             }
3101           }
3102         }
3103         if (attributeBuilder_ == null) {
3104           if (!other.attribute_.isEmpty()) {
3105             if (attribute_.isEmpty()) {
3106               attribute_ = other.attribute_;
3107               bitField0_ = (bitField0_ & ~0x00000004);
3108             } else {
3109               ensureAttributeIsMutable();
3110               attribute_.addAll(other.attribute_);
3111             }
3112             onChanged();
3113           }
3114         } else {
3115           if (!other.attribute_.isEmpty()) {
3116             if (attributeBuilder_.isEmpty()) {
3117               attributeBuilder_.dispose();
3118               attributeBuilder_ = null;
3119               attribute_ = other.attribute_;
3120               bitField0_ = (bitField0_ & ~0x00000004);
3121               attributeBuilder_ =
3122                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
3123                    getAttributeFieldBuilder() : null;
3124             } else {
3125               attributeBuilder_.addAllMessages(other.attribute_);
3126             }
3127           }
3128         }
3129         if (other.hasFilter()) {
3130           mergeFilter(other.getFilter());
3131         }
3132         if (other.hasTimeRange()) {
3133           mergeTimeRange(other.getTimeRange());
3134         }
3135         if (other.hasMaxVersions()) {
3136           setMaxVersions(other.getMaxVersions());
3137         }
3138         if (other.hasCacheBlocks()) {
3139           setCacheBlocks(other.getCacheBlocks());
3140         }
3141         if (other.hasStoreLimit()) {
3142           setStoreLimit(other.getStoreLimit());
3143         }
3144         if (other.hasStoreOffset()) {
3145           setStoreOffset(other.getStoreOffset());
3146         }
3147         if (other.hasExistenceOnly()) {
3148           setExistenceOnly(other.getExistenceOnly());
3149         }
3150         if (other.hasClosestRowBefore()) {
3151           setClosestRowBefore(other.getClosestRowBefore());
3152         }
3153         if (other.hasConsistency()) {
3154           setConsistency(other.getConsistency());
3155         }
3156         if (cfTimeRangeBuilder_ == null) {
3157           if (!other.cfTimeRange_.isEmpty()) {
3158             if (cfTimeRange_.isEmpty()) {
3159               cfTimeRange_ = other.cfTimeRange_;
3160               bitField0_ = (bitField0_ & ~0x00001000);
3161             } else {
3162               ensureCfTimeRangeIsMutable();
3163               cfTimeRange_.addAll(other.cfTimeRange_);
3164             }
3165             onChanged();
3166           }
3167         } else {
3168           if (!other.cfTimeRange_.isEmpty()) {
3169             if (cfTimeRangeBuilder_.isEmpty()) {
3170               cfTimeRangeBuilder_.dispose();
3171               cfTimeRangeBuilder_ = null;
3172               cfTimeRange_ = other.cfTimeRange_;
3173               bitField0_ = (bitField0_ & ~0x00001000);
3174               cfTimeRangeBuilder_ =
3175                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
3176                    getCfTimeRangeFieldBuilder() : null;
3177             } else {
3178               cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_);
3179             }
3180           }
3181         }
3182         this.mergeUnknownFields(other.getUnknownFields());
3183         return this;
3184       }
3185 
      public final boolean isInitialized() {
3187         if (!hasRow()) {
3188 
3189           return false;
3190         }
3191         for (int i = 0; i < getColumnCount(); i++) {
3192           if (!getColumn(i).isInitialized()) {
3193 
3194             return false;
3195           }
3196         }
3197         for (int i = 0; i < getAttributeCount(); i++) {
3198           if (!getAttribute(i).isInitialized()) {
3199 
3200             return false;
3201           }
3202         }
3203         if (hasFilter()) {
3204           if (!getFilter().isInitialized()) {
3205 
3206             return false;
3207           }
3208         }
3209         for (int i = 0; i < getCfTimeRangeCount(); i++) {
3210           if (!getCfTimeRange(i).isInitialized()) {
3211 
3212             return false;
3213           }
3214         }
3215         return true;
3216       }
3217 
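      // Note (added documentation, not protoc output): this mergeFrom overload parses a
      // complete Get from the stream via PARSER and then merges it into this Builder.
      // If parsing fails partway, the partially parsed message attached to the
      // InvalidProtocolBufferException is still merged in the finally block before the
      // exception is rethrown.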
      public Builder mergeFrom(
3219           com.google.protobuf.CodedInputStream input,
3220           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3221           throws java.io.IOException {
3222         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parsedMessage = null;
3223         try {
3224           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3225         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3226           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage();
3227           throw e;
3228         } finally {
3229           if (parsedMessage != null) {
3230             mergeFrom(parsedMessage);
3231           }
3232         }
3233         return this;
3234       }
3235       private int bitField0_;
3236 
3237       // required bytes row = 1;
3238       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
3239       /**
3240        * <code>required bytes row = 1;</code>
3241        */
      public boolean hasRow() {
3243         return ((bitField0_ & 0x00000001) == 0x00000001);
3244       }
3245       /**
3246        * <code>required bytes row = 1;</code>
3247        */
      public com.google.protobuf.ByteString getRow() {
3249         return row_;
3250       }
3251       /**
3252        * <code>required bytes row = 1;</code>
3253        */
      public Builder setRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
3259         row_ = value;
3260         onChanged();
3261         return this;
3262       }
3263       /**
3264        * <code>required bytes row = 1;</code>
3265        */
      public Builder clearRow() {
3267         bitField0_ = (bitField0_ & ~0x00000001);
3268         row_ = getDefaultInstance().getRow();
3269         onChanged();
3270         return this;
3271       }
3272 
3273       // repeated .Column column = 2;
3274       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
3275         java.util.Collections.emptyList();
      private void ensureColumnIsMutable() {
3277         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
3278           column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
3279           bitField0_ |= 0x00000002;
3280          }
3281       }
3282 
3283       private com.google.protobuf.RepeatedFieldBuilder<
3284           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;
3285 
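      // Note (added documentation, not protoc output): the repeated 'column' field has two
      // representations. Until getColumnFieldBuilder() is first called, elements live in the
      // plain column_ list guarded by ensureColumnIsMutable(); afterwards columnBuilder_
      // (a RepeatedFieldBuilder) owns the elements and column_ is set to null, so every
      // accessor below branches on whether columnBuilder_ is null. The same pattern is used
      // for 'attribute' and 'cf_time_range'.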
3286       /**
3287        * <code>repeated .Column column = 2;</code>
3288        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
3290         if (columnBuilder_ == null) {
3291           return java.util.Collections.unmodifiableList(column_);
3292         } else {
3293           return columnBuilder_.getMessageList();
3294         }
3295       }
3296       /**
3297        * <code>repeated .Column column = 2;</code>
3298        */
      public int getColumnCount() {
3300         if (columnBuilder_ == null) {
3301           return column_.size();
3302         } else {
3303           return columnBuilder_.getCount();
3304         }
3305       }
3306       /**
3307        * <code>repeated .Column column = 2;</code>
3308        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
3310         if (columnBuilder_ == null) {
3311           return column_.get(index);
3312         } else {
3313           return columnBuilder_.getMessage(index);
3314         }
3315       }
3316       /**
3317        * <code>repeated .Column column = 2;</code>
3318        */
      public Builder setColumn(
3320           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
3321         if (columnBuilder_ == null) {
3322           if (value == null) {
3323             throw new NullPointerException();
3324           }
3325           ensureColumnIsMutable();
3326           column_.set(index, value);
3327           onChanged();
3328         } else {
3329           columnBuilder_.setMessage(index, value);
3330         }
3331         return this;
3332       }
3333       /**
3334        * <code>repeated .Column column = 2;</code>
3335        */
      public Builder setColumn(
3337           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3338         if (columnBuilder_ == null) {
3339           ensureColumnIsMutable();
3340           column_.set(index, builderForValue.build());
3341           onChanged();
3342         } else {
3343           columnBuilder_.setMessage(index, builderForValue.build());
3344         }
3345         return this;
3346       }
3347       /**
3348        * <code>repeated .Column column = 2;</code>
3349        */
      public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
3351         if (columnBuilder_ == null) {
3352           if (value == null) {
3353             throw new NullPointerException();
3354           }
3355           ensureColumnIsMutable();
3356           column_.add(value);
3357           onChanged();
3358         } else {
3359           columnBuilder_.addMessage(value);
3360         }
3361         return this;
3362       }
3363       /**
3364        * <code>repeated .Column column = 2;</code>
3365        */
      public Builder addColumn(
3367           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
3368         if (columnBuilder_ == null) {
3369           if (value == null) {
3370             throw new NullPointerException();
3371           }
3372           ensureColumnIsMutable();
3373           column_.add(index, value);
3374           onChanged();
3375         } else {
3376           columnBuilder_.addMessage(index, value);
3377         }
3378         return this;
3379       }
3380       /**
3381        * <code>repeated .Column column = 2;</code>
3382        */
      public Builder addColumn(
3384           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3385         if (columnBuilder_ == null) {
3386           ensureColumnIsMutable();
3387           column_.add(builderForValue.build());
3388           onChanged();
3389         } else {
3390           columnBuilder_.addMessage(builderForValue.build());
3391         }
3392         return this;
3393       }
3394       /**
3395        * <code>repeated .Column column = 2;</code>
3396        */
      public Builder addColumn(
3398           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
3399         if (columnBuilder_ == null) {
3400           ensureColumnIsMutable();
3401           column_.add(index, builderForValue.build());
3402           onChanged();
3403         } else {
3404           columnBuilder_.addMessage(index, builderForValue.build());
3405         }
3406         return this;
3407       }
3408       /**
3409        * <code>repeated .Column column = 2;</code>
3410        */
      public Builder addAllColumn(
3412           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
3413         if (columnBuilder_ == null) {
3414           ensureColumnIsMutable();
3415           super.addAll(values, column_);
3416           onChanged();
3417         } else {
3418           columnBuilder_.addAllMessages(values);
3419         }
3420         return this;
3421       }
3422       /**
3423        * <code>repeated .Column column = 2;</code>
3424        */
      public Builder clearColumn() {
3426         if (columnBuilder_ == null) {
3427           column_ = java.util.Collections.emptyList();
3428           bitField0_ = (bitField0_ & ~0x00000002);
3429           onChanged();
3430         } else {
3431           columnBuilder_.clear();
3432         }
3433         return this;
3434       }
3435       /**
3436        * <code>repeated .Column column = 2;</code>
3437        */
      public Builder removeColumn(int index) {
3439         if (columnBuilder_ == null) {
3440           ensureColumnIsMutable();
3441           column_.remove(index);
3442           onChanged();
3443         } else {
3444           columnBuilder_.remove(index);
3445         }
3446         return this;
3447       }
3448       /**
3449        * <code>repeated .Column column = 2;</code>
3450        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
3452           int index) {
3453         return getColumnFieldBuilder().getBuilder(index);
3454       }
3455       /**
3456        * <code>repeated .Column column = 2;</code>
3457        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
3459           int index) {
        if (columnBuilder_ == null) {
          return column_.get(index);
        } else {
          return columnBuilder_.getMessageOrBuilder(index);
        }
3464       }
3465       /**
3466        * <code>repeated .Column column = 2;</code>
3467        */
3468       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
           getColumnOrBuilderList() {
3470         if (columnBuilder_ != null) {
3471           return columnBuilder_.getMessageOrBuilderList();
3472         } else {
3473           return java.util.Collections.unmodifiableList(column_);
3474         }
3475       }
3476       /**
3477        * <code>repeated .Column column = 2;</code>
3478        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
3480         return getColumnFieldBuilder().addBuilder(
3481             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
3482       }
3483       /**
3484        * <code>repeated .Column column = 2;</code>
3485        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
3487           int index) {
3488         return getColumnFieldBuilder().addBuilder(
3489             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
3490       }
3491       /**
3492        * <code>repeated .Column column = 2;</code>
3493        */
3494       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder>
           getColumnBuilderList() {
3496         return getColumnFieldBuilder().getBuilderList();
3497       }
3498       private com.google.protobuf.RepeatedFieldBuilder<
3499           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
          getColumnFieldBuilder() {
3501         if (columnBuilder_ == null) {
3502           columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
3503               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
3504                   column_,
3505                   ((bitField0_ & 0x00000002) == 0x00000002),
3506                   getParentForChildren(),
3507                   isClean());
3508           column_ = null;
3509         }
3510         return columnBuilder_;
3511       }
3512 
3513       // repeated .NameBytesPair attribute = 3;
3514       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
3515         java.util.Collections.emptyList();
      private void ensureAttributeIsMutable() {
3517         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
3518           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
3519           bitField0_ |= 0x00000004;
3520          }
3521       }
3522 
3523       private com.google.protobuf.RepeatedFieldBuilder<
3524           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
3525 
3526       /**
3527        * <code>repeated .NameBytesPair attribute = 3;</code>
3528        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
3530         if (attributeBuilder_ == null) {
3531           return java.util.Collections.unmodifiableList(attribute_);
3532         } else {
3533           return attributeBuilder_.getMessageList();
3534         }
3535       }
3536       /**
3537        * <code>repeated .NameBytesPair attribute = 3;</code>
3538        */
      public int getAttributeCount() {
3540         if (attributeBuilder_ == null) {
3541           return attribute_.size();
3542         } else {
3543           return attributeBuilder_.getCount();
3544         }
3545       }
3546       /**
3547        * <code>repeated .NameBytesPair attribute = 3;</code>
3548        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
3550         if (attributeBuilder_ == null) {
3551           return attribute_.get(index);
3552         } else {
3553           return attributeBuilder_.getMessage(index);
3554         }
3555       }
3556       /**
3557        * <code>repeated .NameBytesPair attribute = 3;</code>
3558        */
      public Builder setAttribute(
3560           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3561         if (attributeBuilder_ == null) {
3562           if (value == null) {
3563             throw new NullPointerException();
3564           }
3565           ensureAttributeIsMutable();
3566           attribute_.set(index, value);
3567           onChanged();
3568         } else {
3569           attributeBuilder_.setMessage(index, value);
3570         }
3571         return this;
3572       }
3573       /**
3574        * <code>repeated .NameBytesPair attribute = 3;</code>
3575        */
      public Builder setAttribute(
3577           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3578         if (attributeBuilder_ == null) {
3579           ensureAttributeIsMutable();
3580           attribute_.set(index, builderForValue.build());
3581           onChanged();
3582         } else {
3583           attributeBuilder_.setMessage(index, builderForValue.build());
3584         }
3585         return this;
3586       }
3587       /**
3588        * <code>repeated .NameBytesPair attribute = 3;</code>
3589        */
      public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3591         if (attributeBuilder_ == null) {
3592           if (value == null) {
3593             throw new NullPointerException();
3594           }
3595           ensureAttributeIsMutable();
3596           attribute_.add(value);
3597           onChanged();
3598         } else {
3599           attributeBuilder_.addMessage(value);
3600         }
3601         return this;
3602       }
3603       /**
3604        * <code>repeated .NameBytesPair attribute = 3;</code>
3605        */
      public Builder addAttribute(
3607           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
3608         if (attributeBuilder_ == null) {
3609           if (value == null) {
3610             throw new NullPointerException();
3611           }
3612           ensureAttributeIsMutable();
3613           attribute_.add(index, value);
3614           onChanged();
3615         } else {
3616           attributeBuilder_.addMessage(index, value);
3617         }
3618         return this;
3619       }
3620       /**
3621        * <code>repeated .NameBytesPair attribute = 3;</code>
3622        */
      public Builder addAttribute(
3624           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3625         if (attributeBuilder_ == null) {
3626           ensureAttributeIsMutable();
3627           attribute_.add(builderForValue.build());
3628           onChanged();
3629         } else {
3630           attributeBuilder_.addMessage(builderForValue.build());
3631         }
3632         return this;
3633       }
3634       /**
3635        * <code>repeated .NameBytesPair attribute = 3;</code>
3636        */
      public Builder addAttribute(
3638           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
3639         if (attributeBuilder_ == null) {
3640           ensureAttributeIsMutable();
3641           attribute_.add(index, builderForValue.build());
3642           onChanged();
3643         } else {
3644           attributeBuilder_.addMessage(index, builderForValue.build());
3645         }
3646         return this;
3647       }
3648       /**
3649        * <code>repeated .NameBytesPair attribute = 3;</code>
3650        */
      public Builder addAllAttribute(
3652           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
3653         if (attributeBuilder_ == null) {
3654           ensureAttributeIsMutable();
3655           super.addAll(values, attribute_);
3656           onChanged();
3657         } else {
3658           attributeBuilder_.addAllMessages(values);
3659         }
3660         return this;
3661       }
3662       /**
3663        * <code>repeated .NameBytesPair attribute = 3;</code>
3664        */
      public Builder clearAttribute() {
3666         if (attributeBuilder_ == null) {
3667           attribute_ = java.util.Collections.emptyList();
3668           bitField0_ = (bitField0_ & ~0x00000004);
3669           onChanged();
3670         } else {
3671           attributeBuilder_.clear();
3672         }
3673         return this;
3674       }
3675       /**
3676        * <code>repeated .NameBytesPair attribute = 3;</code>
3677        */
      public Builder removeAttribute(int index) {
3679         if (attributeBuilder_ == null) {
3680           ensureAttributeIsMutable();
3681           attribute_.remove(index);
3682           onChanged();
3683         } else {
3684           attributeBuilder_.remove(index);
3685         }
3686         return this;
3687       }
3688       /**
3689        * <code>repeated .NameBytesPair attribute = 3;</code>
3690        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
3692           int index) {
3693         return getAttributeFieldBuilder().getBuilder(index);
3694       }
3695       /**
3696        * <code>repeated .NameBytesPair attribute = 3;</code>
3697        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
3699           int index) {
        if (attributeBuilder_ == null) {
          return attribute_.get(index);
        } else {
          return attributeBuilder_.getMessageOrBuilder(index);
        }
3704       }
3705       /**
3706        * <code>repeated .NameBytesPair attribute = 3;</code>
3707        */
3708       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
           getAttributeOrBuilderList() {
3710         if (attributeBuilder_ != null) {
3711           return attributeBuilder_.getMessageOrBuilderList();
3712         } else {
3713           return java.util.Collections.unmodifiableList(attribute_);
3714         }
3715       }
3716       /**
3717        * <code>repeated .NameBytesPair attribute = 3;</code>
3718        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
3720         return getAttributeFieldBuilder().addBuilder(
3721             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
3722       }
3723       /**
3724        * <code>repeated .NameBytesPair attribute = 3;</code>
3725        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
3727           int index) {
3728         return getAttributeFieldBuilder().addBuilder(
3729             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
3730       }
3731       /**
3732        * <code>repeated .NameBytesPair attribute = 3;</code>
3733        */
3734       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
           getAttributeBuilderList() {
3736         return getAttributeFieldBuilder().getBuilderList();
3737       }
3738       private com.google.protobuf.RepeatedFieldBuilder<
3739           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
          getAttributeFieldBuilder() {
3741         if (attributeBuilder_ == null) {
3742           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
3743               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
3744                   attribute_,
3745                   ((bitField0_ & 0x00000004) == 0x00000004),
3746                   getParentForChildren(),
3747                   isClean());
3748           attribute_ = null;
3749         }
3750         return attributeBuilder_;
3751       }
3752 
3753       // optional .Filter filter = 4;
3754       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
3755       private com.google.protobuf.SingleFieldBuilder<
3756           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
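      // Note (added documentation, not protoc output): singular message fields such as
      // 'filter' and 'time_range' use the same lazy pattern with a SingleFieldBuilder:
      // filter_ holds the value until getFilterFieldBuilder() is called, after which
      // filterBuilder_ takes over and filter_ is nulled. mergeFilter() additionally merges
      // into an existing non-default filter instead of overwriting it.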
3757       /**
3758        * <code>optional .Filter filter = 4;</code>
3759        */
      public boolean hasFilter() {
3761         return ((bitField0_ & 0x00000008) == 0x00000008);
3762       }
3763       /**
3764        * <code>optional .Filter filter = 4;</code>
3765        */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
3767         if (filterBuilder_ == null) {
3768           return filter_;
3769         } else {
3770           return filterBuilder_.getMessage();
3771         }
3772       }
3773       /**
3774        * <code>optional .Filter filter = 4;</code>
3775        */
      public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
3777         if (filterBuilder_ == null) {
3778           if (value == null) {
3779             throw new NullPointerException();
3780           }
3781           filter_ = value;
3782           onChanged();
3783         } else {
3784           filterBuilder_.setMessage(value);
3785         }
3786         bitField0_ |= 0x00000008;
3787         return this;
3788       }
3789       /**
3790        * <code>optional .Filter filter = 4;</code>
3791        */
      public Builder setFilter(
3793           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
3794         if (filterBuilder_ == null) {
3795           filter_ = builderForValue.build();
3796           onChanged();
3797         } else {
3798           filterBuilder_.setMessage(builderForValue.build());
3799         }
3800         bitField0_ |= 0x00000008;
3801         return this;
3802       }
3803       /**
3804        * <code>optional .Filter filter = 4;</code>
3805        */
      public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
3807         if (filterBuilder_ == null) {
3808           if (((bitField0_ & 0x00000008) == 0x00000008) &&
3809               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
3810             filter_ =
3811               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
3812           } else {
3813             filter_ = value;
3814           }
3815           onChanged();
3816         } else {
3817           filterBuilder_.mergeFrom(value);
3818         }
3819         bitField0_ |= 0x00000008;
3820         return this;
3821       }
3822       /**
3823        * <code>optional .Filter filter = 4;</code>
3824        */
      public Builder clearFilter() {
3826         if (filterBuilder_ == null) {
3827           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
3828           onChanged();
3829         } else {
3830           filterBuilder_.clear();
3831         }
3832         bitField0_ = (bitField0_ & ~0x00000008);
3833         return this;
3834       }
3835       /**
3836        * <code>optional .Filter filter = 4;</code>
3837        */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
3839         bitField0_ |= 0x00000008;
3840         onChanged();
3841         return getFilterFieldBuilder().getBuilder();
3842       }
3843       /**
3844        * <code>optional .Filter filter = 4;</code>
3845        */
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
3847         if (filterBuilder_ != null) {
3848           return filterBuilder_.getMessageOrBuilder();
3849         } else {
3850           return filter_;
3851         }
3852       }
3853       /**
3854        * <code>optional .Filter filter = 4;</code>
3855        */
3856       private com.google.protobuf.SingleFieldBuilder<
3857           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
          getFilterFieldBuilder() {
3859         if (filterBuilder_ == null) {
3860           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3861               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
3862                   filter_,
3863                   getParentForChildren(),
3864                   isClean());
3865           filter_ = null;
3866         }
3867         return filterBuilder_;
3868       }
3869 
3870       // optional .TimeRange time_range = 5;
3871       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
3872       private com.google.protobuf.SingleFieldBuilder<
3873           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
3874       /**
3875        * <code>optional .TimeRange time_range = 5;</code>
3876        */
3877       public boolean hasTimeRange() {
3878         return ((bitField0_ & 0x00000010) == 0x00000010);
3879       }
3880       /**
3881        * <code>optional .TimeRange time_range = 5;</code>
3882        */
3883       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
3884         if (timeRangeBuilder_ == null) {
3885           return timeRange_;
3886         } else {
3887           return timeRangeBuilder_.getMessage();
3888         }
3889       }
3890       /**
3891        * <code>optional .TimeRange time_range = 5;</code>
3892        */
3893       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
3894         if (timeRangeBuilder_ == null) {
3895           if (value == null) {
3896             throw new NullPointerException();
3897           }
3898           timeRange_ = value;
3899           onChanged();
3900         } else {
3901           timeRangeBuilder_.setMessage(value);
3902         }
3903         bitField0_ |= 0x00000010;
3904         return this;
3905       }
3906       /**
3907        * <code>optional .TimeRange time_range = 5;</code>
3908        */
3909       public Builder setTimeRange(
3910           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
3911         if (timeRangeBuilder_ == null) {
3912           timeRange_ = builderForValue.build();
3913           onChanged();
3914         } else {
3915           timeRangeBuilder_.setMessage(builderForValue.build());
3916         }
3917         bitField0_ |= 0x00000010;
3918         return this;
3919       }
3920       /**
3921        * <code>optional .TimeRange time_range = 5;</code>
3922        */
3923       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
3924         if (timeRangeBuilder_ == null) {
3925           if (((bitField0_ & 0x00000010) == 0x00000010) &&
3926               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
3927             timeRange_ =
3928               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
3929           } else {
3930             timeRange_ = value;
3931           }
3932           onChanged();
3933         } else {
3934           timeRangeBuilder_.mergeFrom(value);
3935         }
3936         bitField0_ |= 0x00000010;
3937         return this;
3938       }
3939       /**
3940        * <code>optional .TimeRange time_range = 5;</code>
3941        */
3942       public Builder clearTimeRange() {
3943         if (timeRangeBuilder_ == null) {
3944           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
3945           onChanged();
3946         } else {
3947           timeRangeBuilder_.clear();
3948         }
3949         bitField0_ = (bitField0_ & ~0x00000010);
3950         return this;
3951       }
3952       /**
3953        * <code>optional .TimeRange time_range = 5;</code>
3954        */
3955       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
3956         bitField0_ |= 0x00000010;
3957         onChanged();
3958         return getTimeRangeFieldBuilder().getBuilder();
3959       }
3960       /**
3961        * <code>optional .TimeRange time_range = 5;</code>
3962        */
3963       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
3964         if (timeRangeBuilder_ != null) {
3965           return timeRangeBuilder_.getMessageOrBuilder();
3966         } else {
3967           return timeRange_;
3968         }
3969       }
3970       /**
3971        * <code>optional .TimeRange time_range = 5;</code>
3972        */
3973       private com.google.protobuf.SingleFieldBuilder<
3974           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
3975           getTimeRangeFieldBuilder() {
3976         if (timeRangeBuilder_ == null) {
3977           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3978               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
3979                   timeRange_,
3980                   getParentForChildren(),
3981                   isClean());
3982           timeRange_ = null;
3983         }
3984         return timeRangeBuilder_;
3985       }
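      // Usage sketch (illustrative only): restricting a Get to a timestamp window through
      // the time_range field. This assumes the from/to fields declared on
      // HBaseProtos.TimeRange in HBase.proto (not shown in this file):
      //
      //   Get.Builder b = Get.newBuilder();
      //   b.setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange
      //       .newBuilder().setFrom(1000L).setTo(2000L).build());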
3986 
3987       // optional uint32 max_versions = 6 [default = 1];
3988       private int maxVersions_ = 1;
3989       /**
3990        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3991        */
3992       public boolean hasMaxVersions() {
3993         return ((bitField0_ & 0x00000020) == 0x00000020);
3994       }
3995       /**
3996        * <code>optional uint32 max_versions = 6 [default = 1];</code>
3997        */
3998       public int getMaxVersions() {
3999         return maxVersions_;
4000       }
4001       /**
4002        * <code>optional uint32 max_versions = 6 [default = 1];</code>
4003        */
4004       public Builder setMaxVersions(int value) {
4005         bitField0_ |= 0x00000020;
4006         maxVersions_ = value;
4007         onChanged();
4008         return this;
4009       }
4010       /**
4011        * <code>optional uint32 max_versions = 6 [default = 1];</code>
4012        */
4013       public Builder clearMaxVersions() {
4014         bitField0_ = (bitField0_ & ~0x00000020);
4015         maxVersions_ = 1;
4016         onChanged();
4017         return this;
4018       }
4019 
4020       // optional bool cache_blocks = 7 [default = true];
4021       private boolean cacheBlocks_ = true;
4022       /**
4023        * <code>optional bool cache_blocks = 7 [default = true];</code>
4024        */
4025       public boolean hasCacheBlocks() {
4026         return ((bitField0_ & 0x00000040) == 0x00000040);
4027       }
4028       /**
4029        * <code>optional bool cache_blocks = 7 [default = true];</code>
4030        */
4031       public boolean getCacheBlocks() {
4032         return cacheBlocks_;
4033       }
4034       /**
4035        * <code>optional bool cache_blocks = 7 [default = true];</code>
4036        */
4037       public Builder setCacheBlocks(boolean value) {
4038         bitField0_ |= 0x00000040;
4039         cacheBlocks_ = value;
4040         onChanged();
4041         return this;
4042       }
4043       /**
4044        * <code>optional bool cache_blocks = 7 [default = true];</code>
4045        */
4046       public Builder clearCacheBlocks() {
4047         bitField0_ = (bitField0_ & ~0x00000040);
4048         cacheBlocks_ = true;
4049         onChanged();
4050         return this;
4051       }
4052 
4053       // optional uint32 store_limit = 8;
4054       private int storeLimit_ ;
4055       /**
4056        * <code>optional uint32 store_limit = 8;</code>
4057        */
4058       public boolean hasStoreLimit() {
4059         return ((bitField0_ & 0x00000080) == 0x00000080);
4060       }
4061       /**
4062        * <code>optional uint32 store_limit = 8;</code>
4063        */
4064       public int getStoreLimit() {
4065         return storeLimit_;
4066       }
4067       /**
4068        * <code>optional uint32 store_limit = 8;</code>
4069        */
4070       public Builder setStoreLimit(int value) {
4071         bitField0_ |= 0x00000080;
4072         storeLimit_ = value;
4073         onChanged();
4074         return this;
4075       }
4076       /**
4077        * <code>optional uint32 store_limit = 8;</code>
4078        */
4079       public Builder clearStoreLimit() {
4080         bitField0_ = (bitField0_ & ~0x00000080);
4081         storeLimit_ = 0;
4082         onChanged();
4083         return this;
4084       }
4085 
4086       // optional uint32 store_offset = 9;
4087       private int storeOffset_ ;
4088       /**
4089        * <code>optional uint32 store_offset = 9;</code>
4090        */
4091       public boolean hasStoreOffset() {
4092         return ((bitField0_ & 0x00000100) == 0x00000100);
4093       }
4094       /**
4095        * <code>optional uint32 store_offset = 9;</code>
4096        */
4097       public int getStoreOffset() {
4098         return storeOffset_;
4099       }
4100       /**
4101        * <code>optional uint32 store_offset = 9;</code>
4102        */
4103       public Builder setStoreOffset(int value) {
4104         bitField0_ |= 0x00000100;
4105         storeOffset_ = value;
4106         onChanged();
4107         return this;
4108       }
4109       /**
4110        * <code>optional uint32 store_offset = 9;</code>
4111        */
4112       public Builder clearStoreOffset() {
4113         bitField0_ = (bitField0_ & ~0x00000100);
4114         storeOffset_ = 0;
4115         onChanged();
4116         return this;
4117       }
4118 
4119       // optional bool existence_only = 10 [default = false];
4120       private boolean existenceOnly_ ;
4121       /**
4122        * <code>optional bool existence_only = 10 [default = false];</code>
4123        *
4124        * <pre>
4125        * The result isn't asked for, just check for
4126        * the existence.
4127        * </pre>
4128        */
4129       public boolean hasExistenceOnly() {
4130         return ((bitField0_ & 0x00000200) == 0x00000200);
4131       }
4132       /**
4133        * <code>optional bool existence_only = 10 [default = false];</code>
4134        *
4135        * <pre>
4136        * The result isn't asked for, just check for
4137        * the existence.
4138        * </pre>
4139        */
4140       public boolean getExistenceOnly() {
4141         return existenceOnly_;
4142       }
4143       /**
4144        * <code>optional bool existence_only = 10 [default = false];</code>
4145        *
4146        * <pre>
4147        * The result isn't asked for, just check for
4148        * the existence.
4149        * </pre>
4150        */
4151       public Builder setExistenceOnly(boolean value) {
4152         bitField0_ |= 0x00000200;
4153         existenceOnly_ = value;
4154         onChanged();
4155         return this;
4156       }
4157       /**
4158        * <code>optional bool existence_only = 10 [default = false];</code>
4159        *
4160        * <pre>
4161        * The result isn't asked for, just check for
4162        * the existence.
4163        * </pre>
4164        */
4165       public Builder clearExistenceOnly() {
4166         bitField0_ = (bitField0_ & ~0x00000200);
4167         existenceOnly_ = false;
4168         onChanged();
4169         return this;
4170       }
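      // Usage sketch (illustrative only): an existence-only probe asks the server to skip
      // returning cells and answer through the Result message's "exists" flag instead
      // (see the Result message later in this file):
      //
      //   Get.Builder b = Get.newBuilder();
      //   b.setExistenceOnly(true);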
4171 
4172       // optional bool closest_row_before = 11 [default = false];
4173       private boolean closestRowBefore_ ;
4174       /**
4175        * <code>optional bool closest_row_before = 11 [default = false];</code>
4176        *
4177        * <pre>
4178        * If the row to get doesn't exist, return the
4179        * closest row before.
4180        * </pre>
4181        */
4182       public boolean hasClosestRowBefore() {
4183         return ((bitField0_ & 0x00000400) == 0x00000400);
4184       }
4185       /**
4186        * <code>optional bool closest_row_before = 11 [default = false];</code>
4187        *
4188        * <pre>
4189        * If the row to get doesn't exist, return the
4190        * closest row before.
4191        * </pre>
4192        */
4193       public boolean getClosestRowBefore() {
4194         return closestRowBefore_;
4195       }
4196       /**
4197        * <code>optional bool closest_row_before = 11 [default = false];</code>
4198        *
4199        * <pre>
4200        * If the row to get doesn't exist, return the
4201        * closest row before.
4202        * </pre>
4203        */
4204       public Builder setClosestRowBefore(boolean value) {
4205         bitField0_ |= 0x00000400;
4206         closestRowBefore_ = value;
4207         onChanged();
4208         return this;
4209       }
4210       /**
4211        * <code>optional bool closest_row_before = 11 [default = false];</code>
4212        *
4213        * <pre>
4214        * If the row to get doesn't exist, return the
4215        * closest row before.
4216        * </pre>
4217        */
4218       public Builder clearClosestRowBefore() {
4219         bitField0_ = (bitField0_ & ~0x00000400);
4220         closestRowBefore_ = false;
4221         onChanged();
4222         return this;
4223       }
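      // Usage sketch (illustrative only): with closest_row_before set, a Get whose row does
      // not exist is answered with the nearest preceding row instead of an empty result:
      //
      //   Get.Builder b = Get.newBuilder();
      //   b.setClosestRowBefore(true);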
4224 
4225       // optional .Consistency consistency = 12 [default = STRONG];
4226       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
4227       /**
4228        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4229        */
4230       public boolean hasConsistency() {
4231         return ((bitField0_ & 0x00000800) == 0x00000800);
4232       }
4233       /**
4234        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4235        */
4236       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
4237         return consistency_;
4238       }
4239       /**
4240        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4241        */
4242       public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
4243         if (value == null) {
4244           throw new NullPointerException();
4245         }
4246         bitField0_ |= 0x00000800;
4247         consistency_ = value;
4248         onChanged();
4249         return this;
4250       }
4251       /**
4252        * <code>optional .Consistency consistency = 12 [default = STRONG];</code>
4253        */
4254       public Builder clearConsistency() {
4255         bitField0_ = (bitField0_ & ~0x00000800);
4256         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
4257         onChanged();
4258         return this;
4259       }
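      // Usage sketch (illustrative only): TIMELINE consistency allows the read to be served
      // from a region replica; such responses may carry stale data, which the Result
      // message reports through its "stale" flag:
      //
      //   Get.Builder b = Get.newBuilder();
      //   b.setConsistency(Consistency.TIMELINE);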
4260 
4261       // repeated .ColumnFamilyTimeRange cf_time_range = 13;
4262       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ =
4263         java.util.Collections.emptyList();
4264       private void ensureCfTimeRangeIsMutable() {
4265         if (!((bitField0_ & 0x00001000) == 0x00001000)) {
4266           cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_);
4267           bitField0_ |= 0x00001000;
4268          }
4269       }
4270 
4271       private com.google.protobuf.RepeatedFieldBuilder<
4272           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_;
4273 
4274       /**
4275        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4276        */
4277       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
4278         if (cfTimeRangeBuilder_ == null) {
4279           return java.util.Collections.unmodifiableList(cfTimeRange_);
4280         } else {
4281           return cfTimeRangeBuilder_.getMessageList();
4282         }
4283       }
4284       /**
4285        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4286        */
4287       public int getCfTimeRangeCount() {
4288         if (cfTimeRangeBuilder_ == null) {
4289           return cfTimeRange_.size();
4290         } else {
4291           return cfTimeRangeBuilder_.getCount();
4292         }
4293       }
4294       /**
4295        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4296        */
4297       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
4298         if (cfTimeRangeBuilder_ == null) {
4299           return cfTimeRange_.get(index);
4300         } else {
4301           return cfTimeRangeBuilder_.getMessage(index);
4302         }
4303       }
4304       /**
4305        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4306        */
4307       public Builder setCfTimeRange(
4308           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
4309         if (cfTimeRangeBuilder_ == null) {
4310           if (value == null) {
4311             throw new NullPointerException();
4312           }
4313           ensureCfTimeRangeIsMutable();
4314           cfTimeRange_.set(index, value);
4315           onChanged();
4316         } else {
4317           cfTimeRangeBuilder_.setMessage(index, value);
4318         }
4319         return this;
4320       }
4321       /**
4322        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4323        */
4324       public Builder setCfTimeRange(
4325           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
4326         if (cfTimeRangeBuilder_ == null) {
4327           ensureCfTimeRangeIsMutable();
4328           cfTimeRange_.set(index, builderForValue.build());
4329           onChanged();
4330         } else {
4331           cfTimeRangeBuilder_.setMessage(index, builderForValue.build());
4332         }
4333         return this;
4334       }
4335       /**
4336        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4337        */
4338       public Builder addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
4339         if (cfTimeRangeBuilder_ == null) {
4340           if (value == null) {
4341             throw new NullPointerException();
4342           }
4343           ensureCfTimeRangeIsMutable();
4344           cfTimeRange_.add(value);
4345           onChanged();
4346         } else {
4347           cfTimeRangeBuilder_.addMessage(value);
4348         }
4349         return this;
4350       }
4351       /**
4352        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4353        */
4354       public Builder addCfTimeRange(
4355           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
4356         if (cfTimeRangeBuilder_ == null) {
4357           if (value == null) {
4358             throw new NullPointerException();
4359           }
4360           ensureCfTimeRangeIsMutable();
4361           cfTimeRange_.add(index, value);
4362           onChanged();
4363         } else {
4364           cfTimeRangeBuilder_.addMessage(index, value);
4365         }
4366         return this;
4367       }
4368       /**
4369        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4370        */
4371       public Builder addCfTimeRange(
4372           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
4373         if (cfTimeRangeBuilder_ == null) {
4374           ensureCfTimeRangeIsMutable();
4375           cfTimeRange_.add(builderForValue.build());
4376           onChanged();
4377         } else {
4378           cfTimeRangeBuilder_.addMessage(builderForValue.build());
4379         }
4380         return this;
4381       }
4382       /**
4383        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4384        */
4385       public Builder addCfTimeRange(
4386           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
4387         if (cfTimeRangeBuilder_ == null) {
4388           ensureCfTimeRangeIsMutable();
4389           cfTimeRange_.add(index, builderForValue.build());
4390           onChanged();
4391         } else {
4392           cfTimeRangeBuilder_.addMessage(index, builderForValue.build());
4393         }
4394         return this;
4395       }
4396       /**
4397        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4398        */
4399       public Builder addAllCfTimeRange(
4400           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) {
4401         if (cfTimeRangeBuilder_ == null) {
4402           ensureCfTimeRangeIsMutable();
4403           super.addAll(values, cfTimeRange_);
4404           onChanged();
4405         } else {
4406           cfTimeRangeBuilder_.addAllMessages(values);
4407         }
4408         return this;
4409       }
4410       /**
4411        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4412        */
4413       public Builder clearCfTimeRange() {
4414         if (cfTimeRangeBuilder_ == null) {
4415           cfTimeRange_ = java.util.Collections.emptyList();
4416           bitField0_ = (bitField0_ & ~0x00001000);
4417           onChanged();
4418         } else {
4419           cfTimeRangeBuilder_.clear();
4420         }
4421         return this;
4422       }
4423       /**
4424        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4425        */
4426       public Builder removeCfTimeRange(int index) {
4427         if (cfTimeRangeBuilder_ == null) {
4428           ensureCfTimeRangeIsMutable();
4429           cfTimeRange_.remove(index);
4430           onChanged();
4431         } else {
4432           cfTimeRangeBuilder_.remove(index);
4433         }
4434         return this;
4435       }
4436       /**
4437        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4438        */
4439       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder(
4440           int index) {
4441         return getCfTimeRangeFieldBuilder().getBuilder(index);
4442       }
4443       /**
4444        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4445        */
4446       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
4447           int index) {
4448         if (cfTimeRangeBuilder_ == null) {
4449           return cfTimeRange_.get(index);  } else {
4450           return cfTimeRangeBuilder_.getMessageOrBuilder(index);
4451         }
4452       }
4453       /**
4454        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4455        */
4456       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
4457            getCfTimeRangeOrBuilderList() {
4458         if (cfTimeRangeBuilder_ != null) {
4459           return cfTimeRangeBuilder_.getMessageOrBuilderList();
4460         } else {
4461           return java.util.Collections.unmodifiableList(cfTimeRange_);
4462         }
4463       }
4464       /**
4465        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4466        */
4467       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() {
4468         return getCfTimeRangeFieldBuilder().addBuilder(
4469             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
4470       }
4471       /**
4472        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4473        */
4474       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder(
4475           int index) {
4476         return getCfTimeRangeFieldBuilder().addBuilder(
4477             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
4478       }
4479       /**
4480        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code>
4481        */
4482       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder>
4483            getCfTimeRangeBuilderList() {
4484         return getCfTimeRangeFieldBuilder().getBuilderList();
4485       }
4486       private com.google.protobuf.RepeatedFieldBuilder<
4487           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
4488           getCfTimeRangeFieldBuilder() {
4489         if (cfTimeRangeBuilder_ == null) {
4490           cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
4491               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>(
4492                   cfTimeRange_,
4493                   ((bitField0_ & 0x00001000) == 0x00001000),
4494                   getParentForChildren(),
4495                   isClean());
4496           cfTimeRange_ = null;
4497         }
4498         return cfTimeRangeBuilder_;
4499       }
4500 
4501       // @@protoc_insertion_point(builder_scope:Get)
4502     }
4503 
4504     static {
4505       defaultInstance = new Get(true);
4506       defaultInstance.initFields();
4507     }
4508 
4509     // @@protoc_insertion_point(class_scope:Get)
4510   }
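  // Usage sketch (illustrative only): a Get assembled with the builder methods above,
  // assuming the message's required row field (declared earlier in the Get message, not
  // shown in this excerpt) is populated from a caller-supplied ByteString named "rowKey":
  //
  //   ClientProtos.Get get = ClientProtos.Get.newBuilder()
  //       .setRow(rowKey)
  //       .setMaxVersions(3)
  //       .setCacheBlocks(false)
  //       .setConsistency(Consistency.STRONG)
  //       .build();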
4511 
4512   public interface ResultOrBuilder
4513       extends com.google.protobuf.MessageOrBuilder {
4514 
4515     // repeated .Cell cell = 1;
4516     /**
4517      * <code>repeated .Cell cell = 1;</code>
4518      *
4519      * <pre>
4520      * Result includes the Cells or else it just has a count of Cells
4521      * that are carried otherwise.
4522      * </pre>
4523      */
4524     java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>
4525         getCellList();
4526     /**
4527      * <code>repeated .Cell cell = 1;</code>
4528      *
4529      * <pre>
4530      * Result includes the Cells or else it just has a count of Cells
4531      * that are carried otherwise.
4532      * </pre>
4533      */
4534     org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index);
4535     /**
4536      * <code>repeated .Cell cell = 1;</code>
4537      *
4538      * <pre>
4539      * Result includes the Cells or else it just has a count of Cells
4540      * that are carried otherwise.
4541      * </pre>
4542      */
4543     int getCellCount();
4544     /**
4545      * <code>repeated .Cell cell = 1;</code>
4546      *
4547      * <pre>
4548      * Result includes the Cells or else it just has a count of Cells
4549      * that are carried otherwise.
4550      * </pre>
4551      */
4552     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
4553         getCellOrBuilderList();
4554     /**
4555      * <code>repeated .Cell cell = 1;</code>
4556      *
4557      * <pre>
4558      * Result includes the Cells or else it just has a count of Cells
4559      * that are carried otherwise.
4560      * </pre>
4561      */
4562     org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
4563         int index);
4564 
4565     // optional int32 associated_cell_count = 2;
4566     /**
4567      * <code>optional int32 associated_cell_count = 2;</code>
4568      *
4569      * <pre>
4570      * The below count is set when the associated cells are
4571      * not part of this protobuf message; they are passed alongside
4572      * and then this Message is just a placeholder with metadata.
4573      * The count is needed to know how many to peel off the block of Cells as
4574      * ours.  NOTE: This is different from the pb managed cell_count of the
4575      * 'cell' field above which is non-null when the cells are pb'd.
4576      * </pre>
4577      */
4578     boolean hasAssociatedCellCount();
4579     /**
4580      * <code>optional int32 associated_cell_count = 2;</code>
4581      *
4582      * <pre>
4583      * The below count is set when the associated cells are
4584      * not part of this protobuf message; they are passed alongside
4585      * and then this Message is just a placeholder with metadata.
4586      * The count is needed to know how many to peel off the block of Cells as
4587      * ours.  NOTE: This is different from the pb managed cell_count of the
4588      * 'cell' field above which is non-null when the cells are pb'd.
4589      * </pre>
4590      */
4591     int getAssociatedCellCount();
4592 
4593     // optional bool exists = 3;
4594     /**
4595      * <code>optional bool exists = 3;</code>
4596      *
4597      * <pre>
4598      * used for Get to check existence only. Not set if existence_only was not set to true
4599      *  in the query.
4600      * </pre>
4601      */
4602     boolean hasExists();
4603     /**
4604      * <code>optional bool exists = 3;</code>
4605      *
4606      * <pre>
4607      * used for Get to check existence only. Not set if existence_only was not set to true
4608      *  in the query.
4609      * </pre>
4610      */
4611     boolean getExists();
4612 
4613     // optional bool stale = 4 [default = false];
4614     /**
4615      * <code>optional bool stale = 4 [default = false];</code>
4616      *
4617      * <pre>
4618      * Whether or not the results are coming from possibly stale data
4619      * </pre>
4620      */
4621     boolean hasStale();
4622     /**
4623      * <code>optional bool stale = 4 [default = false];</code>
4624      *
4625      * <pre>
4626      * Whether or not the results are coming from possibly stale data
4627      * </pre>
4628      */
4629     boolean getStale();
4630 
4631     // optional bool partial = 5 [default = false];
4632     /**
4633      * <code>optional bool partial = 5 [default = false];</code>
4634      *
4635      * <pre>
4636      * Whether or not the entire result could be returned. Results will be split when
4637      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4638      * cells for a row and must be combined with a result containing the remaining cells
4639      * to form a complete result
4640      * </pre>
4641      */
4642     boolean hasPartial();
4643     /**
4644      * <code>optional bool partial = 5 [default = false];</code>
4645      *
4646      * <pre>
4647      * Whether or not the entire result could be returned. Results will be split when
4648      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4649      * cells for a row and must be combined with a result containing the remaining cells
4650      * to form a complete result
4651      * </pre>
4652      */
4653     boolean getPartial();
4654   }
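  // Usage sketch (illustrative only): when Result.partial is true the row's cells are spread
  // across several Result messages, so a caller reassembles them before treating the row as
  // complete. A minimal accumulation loop over an already-received list named "partials"
  // (a hypothetical variable):
  //
  //   java.util.List<CellProtos.Cell> rowCells = new java.util.ArrayList<CellProtos.Cell>();
  //   for (ClientProtos.Result r : partials) {
  //     rowCells.addAll(r.getCellList());
  //     if (!r.getPartial()) break;  // last chunk for this row
  //   }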
4655   /**
4656    * Protobuf type {@code Result}
4657    */
4658   public static final class Result extends
4659       com.google.protobuf.GeneratedMessage
4660       implements ResultOrBuilder {
4661     // Use Result.newBuilder() to construct.
4662     private Result(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4663       super(builder);
4664       this.unknownFields = builder.getUnknownFields();
4665     }
4666     private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4667 
4668     private static final Result defaultInstance;
4669     public static Result getDefaultInstance() {
4670       return defaultInstance;
4671     }
4672 
4673     public Result getDefaultInstanceForType() {
4674       return defaultInstance;
4675     }
4676 
4677     private final com.google.protobuf.UnknownFieldSet unknownFields;
4678     @java.lang.Override
4679     public final com.google.protobuf.UnknownFieldSet
4680         getUnknownFields() {
4681       return this.unknownFields;
4682     }
4683     private Result(
4684         com.google.protobuf.CodedInputStream input,
4685         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4686         throws com.google.protobuf.InvalidProtocolBufferException {
4687       initFields();
4688       int mutable_bitField0_ = 0;
4689       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4690           com.google.protobuf.UnknownFieldSet.newBuilder();
4691       try {
4692         boolean done = false;
4693         while (!done) {
4694           int tag = input.readTag();
4695           switch (tag) {
4696             case 0:
4697               done = true;
4698               break;
4699             default: {
4700               if (!parseUnknownField(input, unknownFields,
4701                                      extensionRegistry, tag)) {
4702                 done = true;
4703               }
4704               break;
4705             }
4706             case 10: {
4707               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4708                 cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>();
4709                 mutable_bitField0_ |= 0x00000001;
4710               }
4711               cell_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry));
4712               break;
4713             }
4714             case 16: {
4715               bitField0_ |= 0x00000001;
4716               associatedCellCount_ = input.readInt32();
4717               break;
4718             }
4719             case 24: {
4720               bitField0_ |= 0x00000002;
4721               exists_ = input.readBool();
4722               break;
4723             }
4724             case 32: {
4725               bitField0_ |= 0x00000004;
4726               stale_ = input.readBool();
4727               break;
4728             }
4729             case 40: {
4730               bitField0_ |= 0x00000008;
4731               partial_ = input.readBool();
4732               break;
4733             }
4734           }
4735         }
4736       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4737         throw e.setUnfinishedMessage(this);
4738       } catch (java.io.IOException e) {
4739         throw new com.google.protobuf.InvalidProtocolBufferException(
4740             e.getMessage()).setUnfinishedMessage(this);
4741       } finally {
4742         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
4743           cell_ = java.util.Collections.unmodifiableList(cell_);
4744         }
4745         this.unknownFields = unknownFields.build();
4746         makeExtensionsImmutable();
4747       }
4748     }
4749     public static final com.google.protobuf.Descriptors.Descriptor
4750         getDescriptor() {
4751       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
4752     }
4753 
4754     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
4755         internalGetFieldAccessorTable() {
4756       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable
4757           .ensureFieldAccessorsInitialized(
4758               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
4759     }
4760 
4761     public static com.google.protobuf.Parser<Result> PARSER =
4762         new com.google.protobuf.AbstractParser<Result>() {
4763       public Result parsePartialFrom(
4764           com.google.protobuf.CodedInputStream input,
4765           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4766           throws com.google.protobuf.InvalidProtocolBufferException {
4767         return new Result(input, extensionRegistry);
4768       }
4769     };
4770 
4771     @java.lang.Override
4772     public com.google.protobuf.Parser<Result> getParserForType() {
4773       return PARSER;
4774     }
4775 
4776     private int bitField0_;
4777     // repeated .Cell cell = 1;
4778     public static final int CELL_FIELD_NUMBER = 1;
4779     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_;
4780     /**
4781      * <code>repeated .Cell cell = 1;</code>
4782      *
4783      * <pre>
4784      * Result includes the Cells or else it just has a count of Cells
4785      * that are carried otherwise.
4786      * </pre>
4787      */
4788     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
4789       return cell_;
4790     }
4791     /**
4792      * <code>repeated .Cell cell = 1;</code>
4793      *
4794      * <pre>
4795      * Result includes the Cells or else it just has a count of Cells
4796      * that are carried otherwise.
4797      * </pre>
4798      */
4799     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
4800         getCellOrBuilderList() {
4801       return cell_;
4802     }
4803     /**
4804      * <code>repeated .Cell cell = 1;</code>
4805      *
4806      * <pre>
4807      * Result includes the Cells or else it just has a count of Cells
4808      * that are carried otherwise.
4809      * </pre>
4810      */
4811     public int getCellCount() {
4812       return cell_.size();
4813     }
4814     /**
4815      * <code>repeated .Cell cell = 1;</code>
4816      *
4817      * <pre>
4818      * Result includes the Cells or else it just has a count of Cells
4819      * that are carried otherwise.
4820      * </pre>
4821      */
4822     public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
4823       return cell_.get(index);
4824     }
4825     /**
4826      * <code>repeated .Cell cell = 1;</code>
4827      *
4828      * <pre>
4829      * Result includes the Cells or else it just has a count of Cells
4830      * that are carried otherwise.
4831      * </pre>
4832      */
4833     public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
4834         int index) {
4835       return cell_.get(index);
4836     }
4837 
4838     // optional int32 associated_cell_count = 2;
4839     public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2;
4840     private int associatedCellCount_;
4841     /**
4842      * <code>optional int32 associated_cell_count = 2;</code>
4843      *
4844      * <pre>
4845      * The below count is set when the associated cells are
4846      * not part of this protobuf message; they are passed alongside
4847      * and then this Message is just a placeholder with metadata.
4848      * The count is needed to know how many to peel off the block of Cells as
4849      * ours.  NOTE: This is different from the pb managed cell_count of the
4850      * 'cell' field above which is non-null when the cells are pb'd.
4851      * </pre>
4852      */
4853     public boolean hasAssociatedCellCount() {
4854       return ((bitField0_ & 0x00000001) == 0x00000001);
4855     }
4856     /**
4857      * <code>optional int32 associated_cell_count = 2;</code>
4858      *
4859      * <pre>
4860      * The below count is set when the associated cells are
4861      * not part of this protobuf message; they are passed alongside
4862      * and then this Message is just a placeholder with metadata.
4863      * The count is needed to know how many to peel off the block of Cells as
4864      * ours.  NOTE: This is different from the pb managed cell_count of the
4865      * 'cell' field above which is non-null when the cells are pb'd.
4866      * </pre>
4867      */
4868     public int getAssociatedCellCount() {
4869       return associatedCellCount_;
4870     }
4871 
4872     // optional bool exists = 3;
4873     public static final int EXISTS_FIELD_NUMBER = 3;
4874     private boolean exists_;
4875     /**
4876      * <code>optional bool exists = 3;</code>
4877      *
4878      * <pre>
4879      * used for Get to check existence only. Not set if existence_only was not set to true
4880      *  in the query.
4881      * </pre>
4882      */
4883     public boolean hasExists() {
4884       return ((bitField0_ & 0x00000002) == 0x00000002);
4885     }
4886     /**
4887      * <code>optional bool exists = 3;</code>
4888      *
4889      * <pre>
4890      * used for Get to check existence only. Not set if existence_only was not set to true
4891      *  in the query.
4892      * </pre>
4893      */
4894     public boolean getExists() {
4895       return exists_;
4896     }
4897 
4898     // optional bool stale = 4 [default = false];
4899     public static final int STALE_FIELD_NUMBER = 4;
4900     private boolean stale_;
4901     /**
4902      * <code>optional bool stale = 4 [default = false];</code>
4903      *
4904      * <pre>
4905      * Whether or not the results are coming from possibly stale data
4906      * </pre>
4907      */
4908     public boolean hasStale() {
4909       return ((bitField0_ & 0x00000004) == 0x00000004);
4910     }
4911     /**
4912      * <code>optional bool stale = 4 [default = false];</code>
4913      *
4914      * <pre>
4915      * Whether or not the results are coming from possibly stale data
4916      * </pre>
4917      */
4918     public boolean getStale() {
4919       return stale_;
4920     }
4921 
4922     // optional bool partial = 5 [default = false];
4923     public static final int PARTIAL_FIELD_NUMBER = 5;
4924     private boolean partial_;
4925     /**
4926      * <code>optional bool partial = 5 [default = false];</code>
4927      *
4928      * <pre>
4929      * Whether or not the entire result could be returned. Results will be split when
4930      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4931      * cells for a row and must be combined with a result containing the remaining cells
4932      * to form a complete result
4933      * </pre>
4934      */
4935     public boolean hasPartial() {
4936       return ((bitField0_ & 0x00000008) == 0x00000008);
4937     }
4938     /**
4939      * <code>optional bool partial = 5 [default = false];</code>
4940      *
4941      * <pre>
4942      * Whether or not the entire result could be returned. Results will be split when
4943      * the RPC chunk size limit is reached. Partial results contain only a subset of the
4944      * cells for a row and must be combined with a result containing the remaining cells
4945      * to form a complete result
4946      * </pre>
4947      */
4948     public boolean getPartial() {
4949       return partial_;
4950     }
4951 
4952     private void initFields() {
4953       cell_ = java.util.Collections.emptyList();
4954       associatedCellCount_ = 0;
4955       exists_ = false;
4956       stale_ = false;
4957       partial_ = false;
4958     }
4959     private byte memoizedIsInitialized = -1;
4960     public final boolean isInitialized() {
4961       byte isInitialized = memoizedIsInitialized;
4962       if (isInitialized != -1) return isInitialized == 1;
4963 
4964       memoizedIsInitialized = 1;
4965       return true;
4966     }
4967 
4968     public void writeTo(com.google.protobuf.CodedOutputStream output)
4969                         throws java.io.IOException {
4970       getSerializedSize();
4971       for (int i = 0; i < cell_.size(); i++) {
4972         output.writeMessage(1, cell_.get(i));
4973       }
4974       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4975         output.writeInt32(2, associatedCellCount_);
4976       }
4977       if (((bitField0_ & 0x00000002) == 0x00000002)) {
4978         output.writeBool(3, exists_);
4979       }
4980       if (((bitField0_ & 0x00000004) == 0x00000004)) {
4981         output.writeBool(4, stale_);
4982       }
4983       if (((bitField0_ & 0x00000008) == 0x00000008)) {
4984         output.writeBool(5, partial_);
4985       }
4986       getUnknownFields().writeTo(output);
4987     }
4988 
4989     private int memoizedSerializedSize = -1;
4990     public int getSerializedSize() {
4991       int size = memoizedSerializedSize;
4992       if (size != -1) return size;
4993 
4994       size = 0;
4995       for (int i = 0; i < cell_.size(); i++) {
4996         size += com.google.protobuf.CodedOutputStream
4997           .computeMessageSize(1, cell_.get(i));
4998       }
4999       if (((bitField0_ & 0x00000001) == 0x00000001)) {
5000         size += com.google.protobuf.CodedOutputStream
5001           .computeInt32Size(2, associatedCellCount_);
5002       }
5003       if (((bitField0_ & 0x00000002) == 0x00000002)) {
5004         size += com.google.protobuf.CodedOutputStream
5005           .computeBoolSize(3, exists_);
5006       }
5007       if (((bitField0_ & 0x00000004) == 0x00000004)) {
5008         size += com.google.protobuf.CodedOutputStream
5009           .computeBoolSize(4, stale_);
5010       }
5011       if (((bitField0_ & 0x00000008) == 0x00000008)) {
5012         size += com.google.protobuf.CodedOutputStream
5013           .computeBoolSize(5, partial_);
5014       }
5015       size += getUnknownFields().getSerializedSize();
5016       memoizedSerializedSize = size;
5017       return size;
5018     }
5019 
5020     private static final long serialVersionUID = 0L;
5021     @java.lang.Override
5022     protected java.lang.Object writeReplace()
5023         throws java.io.ObjectStreamException {
5024       return super.writeReplace();
5025     }
5026 
5027     @java.lang.Override
5028     public boolean equals(final java.lang.Object obj) {
5029       if (obj == this) {
5030        return true;
5031       }
5032       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) {
5033         return super.equals(obj);
5034       }
5035       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj;
5036 
5037       boolean result = true;
5038       result = result && getCellList()
5039           .equals(other.getCellList());
5040       result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
5041       if (hasAssociatedCellCount()) {
5042         result = result && (getAssociatedCellCount()
5043             == other.getAssociatedCellCount());
5044       }
5045       result = result && (hasExists() == other.hasExists());
5046       if (hasExists()) {
5047         result = result && (getExists()
5048             == other.getExists());
5049       }
5050       result = result && (hasStale() == other.hasStale());
5051       if (hasStale()) {
5052         result = result && (getStale()
5053             == other.getStale());
5054       }
5055       result = result && (hasPartial() == other.hasPartial());
5056       if (hasPartial()) {
5057         result = result && (getPartial()
5058             == other.getPartial());
5059       }
5060       result = result &&
5061           getUnknownFields().equals(other.getUnknownFields());
5062       return result;
5063     }
5064 
5065     private int memoizedHashCode = 0;
5066     @java.lang.Override
5067     public int hashCode() {
5068       if (memoizedHashCode != 0) {
5069         return memoizedHashCode;
5070       }
5071       int hash = 41;
5072       hash = (19 * hash) + getDescriptorForType().hashCode();
5073       if (getCellCount() > 0) {
5074         hash = (37 * hash) + CELL_FIELD_NUMBER;
5075         hash = (53 * hash) + getCellList().hashCode();
5076       }
5077       if (hasAssociatedCellCount()) {
5078         hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
5079         hash = (53 * hash) + getAssociatedCellCount();
5080       }
5081       if (hasExists()) {
5082         hash = (37 * hash) + EXISTS_FIELD_NUMBER;
5083         hash = (53 * hash) + hashBoolean(getExists());
5084       }
5085       if (hasStale()) {
5086         hash = (37 * hash) + STALE_FIELD_NUMBER;
5087         hash = (53 * hash) + hashBoolean(getStale());
5088       }
5089       if (hasPartial()) {
5090         hash = (37 * hash) + PARTIAL_FIELD_NUMBER;
5091         hash = (53 * hash) + hashBoolean(getPartial());
5092       }
5093       hash = (29 * hash) + getUnknownFields().hashCode();
5094       memoizedHashCode = hash;
5095       return hash;
5096     }
5097 
5098     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
5099         com.google.protobuf.ByteString data)
5100         throws com.google.protobuf.InvalidProtocolBufferException {
5101       return PARSER.parseFrom(data);
5102     }
5103     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
5104         com.google.protobuf.ByteString data,
5105         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5106         throws com.google.protobuf.InvalidProtocolBufferException {
5107       return PARSER.parseFrom(data, extensionRegistry);
5108     }
5109     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data)
5110         throws com.google.protobuf.InvalidProtocolBufferException {
5111       return PARSER.parseFrom(data);
5112     }
5113     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
5114         byte[] data,
5115         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5116         throws com.google.protobuf.InvalidProtocolBufferException {
5117       return PARSER.parseFrom(data, extensionRegistry);
5118     }
5119     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input)
5120         throws java.io.IOException {
5121       return PARSER.parseFrom(input);
5122     }
5123     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
5124         java.io.InputStream input,
5125         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5126         throws java.io.IOException {
5127       return PARSER.parseFrom(input, extensionRegistry);
5128     }
5129     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input)
5130         throws java.io.IOException {
5131       return PARSER.parseDelimitedFrom(input);
5132     }
5133     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(
5134         java.io.InputStream input,
5135         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5136         throws java.io.IOException {
5137       return PARSER.parseDelimitedFrom(input, extensionRegistry);
5138     }
5139     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
5140         com.google.protobuf.CodedInputStream input)
5141         throws java.io.IOException {
5142       return PARSER.parseFrom(input);
5143     }
5144     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
5145         com.google.protobuf.CodedInputStream input,
5146         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5147         throws java.io.IOException {
5148       return PARSER.parseFrom(input, extensionRegistry);
5149     }
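    // Usage sketch (illustrative only): deserializing a Result from bytes received over the
    // wire and reading the flags defined above; "payload" is a hypothetical byte array:
    //
    //   ClientProtos.Result r = ClientProtos.Result.parseFrom(payload);
    //   boolean found = r.hasExists() && r.getExists();
    //   boolean possiblyStale = r.getStale();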
5150 
5151     public static Builder newBuilder() { return Builder.create(); }
5152     public Builder newBuilderForType() { return newBuilder(); }
5153     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) {
5154       return newBuilder().mergeFrom(prototype);
5155     }
5156     public Builder toBuilder() { return newBuilder(this); }
5157 
5158     @java.lang.Override
5159     protected Builder newBuilderForType(
5160         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5161       Builder builder = new Builder(parent);
5162       return builder;
5163     }
5164     /**
5165      * Protobuf type {@code Result}
5166      */
5167     public static final class Builder extends
5168         com.google.protobuf.GeneratedMessage.Builder<Builder>
5169        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder {
5170       public static final com.google.protobuf.Descriptors.Descriptor
5171           getDescriptor() {
5172         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
5173       }
5174 
5175       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
5176           internalGetFieldAccessorTable() {
5177         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable
5178             .ensureFieldAccessorsInitialized(
5179                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
5180       }
5181 
5182       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder()
5183       private Builder() {
5184         maybeForceBuilderInitialization();
5185       }
5186 
5187       private Builder(
5188           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
5189         super(parent);
5190         maybeForceBuilderInitialization();
5191       }
5192       private void maybeForceBuilderInitialization() {
5193         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
5194           getCellFieldBuilder();
5195         }
5196       }
5197       private static Builder create() {
5198         return new Builder();
5199       }
5200 
5201       public Builder clear() {
5202         super.clear();
5203         if (cellBuilder_ == null) {
5204           cell_ = java.util.Collections.emptyList();
5205           bitField0_ = (bitField0_ & ~0x00000001);
5206         } else {
5207           cellBuilder_.clear();
5208         }
5209         associatedCellCount_ = 0;
5210         bitField0_ = (bitField0_ & ~0x00000002);
5211         exists_ = false;
5212         bitField0_ = (bitField0_ & ~0x00000004);
5213         stale_ = false;
5214         bitField0_ = (bitField0_ & ~0x00000008);
5215         partial_ = false;
5216         bitField0_ = (bitField0_ & ~0x00000010);
5217         return this;
5218       }
5219 
5220       public Builder clone() {
5221         return create().mergeFrom(buildPartial());
5222       }
5223 
5224       public com.google.protobuf.Descriptors.Descriptor
5225           getDescriptorForType() {
5226         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
5227       }
5228 
5229       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() {
5230         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
5231       }
5232 
5233       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() {
5234         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial();
5235         if (!result.isInitialized()) {
5236           throw newUninitializedMessageException(result);
5237         }
5238         return result;
5239       }
5240 
5241       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() {
5242         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this);
5243         int from_bitField0_ = bitField0_;
5244         int to_bitField0_ = 0;
5245         if (cellBuilder_ == null) {
5246           if (((bitField0_ & 0x00000001) == 0x00000001)) {
5247             cell_ = java.util.Collections.unmodifiableList(cell_);
5248             bitField0_ = (bitField0_ & ~0x00000001);
5249           }
5250           result.cell_ = cell_;
5251         } else {
5252           result.cell_ = cellBuilder_.build();
5253         }
5254         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
5255           to_bitField0_ |= 0x00000001;
5256         }
5257         result.associatedCellCount_ = associatedCellCount_;
5258         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
5259           to_bitField0_ |= 0x00000002;
5260         }
5261         result.exists_ = exists_;
5262         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
5263           to_bitField0_ |= 0x00000004;
5264         }
5265         result.stale_ = stale_;
5266         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
5267           to_bitField0_ |= 0x00000008;
5268         }
5269         result.partial_ = partial_;
5270         result.bitField0_ = to_bitField0_;
5271         onBuilt();
5272         return result;
5273       }
5274 
5275       public Builder mergeFrom(com.google.protobuf.Message other) {
5276         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) {
5277           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other);
5278         } else {
5279           super.mergeFrom(other);
5280           return this;
5281         }
5282       }
5283 
5284       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) {
5285         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this;
5286         if (cellBuilder_ == null) {
5287           if (!other.cell_.isEmpty()) {
5288             if (cell_.isEmpty()) {
5289               cell_ = other.cell_;
5290               bitField0_ = (bitField0_ & ~0x00000001);
5291             } else {
5292               ensureCellIsMutable();
5293               cell_.addAll(other.cell_);
5294             }
5295             onChanged();
5296           }
5297         } else {
5298           if (!other.cell_.isEmpty()) {
5299             if (cellBuilder_.isEmpty()) {
5300               cellBuilder_.dispose();
5301               cellBuilder_ = null;
5302               cell_ = other.cell_;
5303               bitField0_ = (bitField0_ & ~0x00000001);
5304               cellBuilder_ =
5305                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
5306                    getCellFieldBuilder() : null;
5307             } else {
5308               cellBuilder_.addAllMessages(other.cell_);
5309             }
5310           }
5311         }
5312         if (other.hasAssociatedCellCount()) {
5313           setAssociatedCellCount(other.getAssociatedCellCount());
5314         }
5315         if (other.hasExists()) {
5316           setExists(other.getExists());
5317         }
5318         if (other.hasStale()) {
5319           setStale(other.getStale());
5320         }
5321         if (other.hasPartial()) {
5322           setPartial(other.getPartial());
5323         }
5324         this.mergeUnknownFields(other.getUnknownFields());
5325         return this;
5326       }
5327 
5328       public final boolean isInitialized() {
5329         return true;
5330       }
5331 
5332       public Builder mergeFrom(
5333           com.google.protobuf.CodedInputStream input,
5334           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5335           throws java.io.IOException {
5336         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parsedMessage = null;
5337         try {
5338           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
5339         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
5340           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage();
5341           throw e;
5342         } finally {
5343           if (parsedMessage != null) {
5344             mergeFrom(parsedMessage);
5345           }
5346         }
5347         return this;
5348       }
5349       private int bitField0_;
5350 
5351       // repeated .Cell cell = 1;
5352       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_ =
5353         java.util.Collections.emptyList();
5354       private void ensureCellIsMutable() {
5355         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
5356           cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>(cell_);
5357           bitField0_ |= 0x00000001;
5358          }
5359       }
5360 
5361       private com.google.protobuf.RepeatedFieldBuilder<
5362           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_;
5363 
5364       /**
5365        * <code>repeated .Cell cell = 1;</code>
5366        *
5367        * <pre>
5368        * Result includes the Cells or else it just has a count of Cells
5369        * that are carried otherwise.
5370        * </pre>
5371        */
5372       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
5373         if (cellBuilder_ == null) {
5374           return java.util.Collections.unmodifiableList(cell_);
5375         } else {
5376           return cellBuilder_.getMessageList();
5377         }
5378       }
5379       /**
5380        * <code>repeated .Cell cell = 1;</code>
5381        *
5382        * <pre>
5383        * Result includes the Cells or else it just has a count of Cells
5384        * that are carried otherwise.
5385        * </pre>
5386        */
5387       public int getCellCount() {
5388         if (cellBuilder_ == null) {
5389           return cell_.size();
5390         } else {
5391           return cellBuilder_.getCount();
5392         }
5393       }
5394       /**
5395        * <code>repeated .Cell cell = 1;</code>
5396        *
5397        * <pre>
5398        * Result includes the Cells or else it just has a count of Cells
5399        * that are carried otherwise.
5400        * </pre>
5401        */
5402       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
5403         if (cellBuilder_ == null) {
5404           return cell_.get(index);
5405         } else {
5406           return cellBuilder_.getMessage(index);
5407         }
5408       }
5409       /**
5410        * <code>repeated .Cell cell = 1;</code>
5411        *
5412        * <pre>
5413        * Result includes the Cells or else it just has a count of Cells
5414        * that are carried otherwise.
5415        * </pre>
5416        */
5417       public Builder setCell(
5418           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
5419         if (cellBuilder_ == null) {
5420           if (value == null) {
5421             throw new NullPointerException();
5422           }
5423           ensureCellIsMutable();
5424           cell_.set(index, value);
5425           onChanged();
5426         } else {
5427           cellBuilder_.setMessage(index, value);
5428         }
5429         return this;
5430       }
5431       /**
5432        * <code>repeated .Cell cell = 1;</code>
5433        *
5434        * <pre>
5435        * Result includes the Cells or else it just has a count of Cells
5436        * that are carried otherwise.
5437        * </pre>
5438        */
5439       public Builder setCell(
5440           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5441         if (cellBuilder_ == null) {
5442           ensureCellIsMutable();
5443           cell_.set(index, builderForValue.build());
5444           onChanged();
5445         } else {
5446           cellBuilder_.setMessage(index, builderForValue.build());
5447         }
5448         return this;
5449       }
5450       /**
5451        * <code>repeated .Cell cell = 1;</code>
5452        *
5453        * <pre>
5454        * Result includes the Cells or else it just has a count of Cells
5455        * that are carried otherwise.
5456        * </pre>
5457        */
5458       public Builder addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
5459         if (cellBuilder_ == null) {
5460           if (value == null) {
5461             throw new NullPointerException();
5462           }
5463           ensureCellIsMutable();
5464           cell_.add(value);
5465           onChanged();
5466         } else {
5467           cellBuilder_.addMessage(value);
5468         }
5469         return this;
5470       }
5471       /**
5472        * <code>repeated .Cell cell = 1;</code>
5473        *
5474        * <pre>
5475        * Result includes the Cells or else it just has a count of Cells
5476        * that are carried otherwise.
5477        * </pre>
5478        */
5479       public Builder addCell(
5480           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) {
5481         if (cellBuilder_ == null) {
5482           if (value == null) {
5483             throw new NullPointerException();
5484           }
5485           ensureCellIsMutable();
5486           cell_.add(index, value);
5487           onChanged();
5488         } else {
5489           cellBuilder_.addMessage(index, value);
5490         }
5491         return this;
5492       }
5493       /**
5494        * <code>repeated .Cell cell = 1;</code>
5495        *
5496        * <pre>
5497        * Result includes the Cells or else it just has a count of Cells
5498        * that are carried otherwise.
5499        * </pre>
5500        */
5501       public Builder addCell(
5502           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5503         if (cellBuilder_ == null) {
5504           ensureCellIsMutable();
5505           cell_.add(builderForValue.build());
5506           onChanged();
5507         } else {
5508           cellBuilder_.addMessage(builderForValue.build());
5509         }
5510         return this;
5511       }
5512       /**
5513        * <code>repeated .Cell cell = 1;</code>
5514        *
5515        * <pre>
5516        * Result includes the Cells or else it just has a count of Cells
5517        * that are carried otherwise.
5518        * </pre>
5519        */
5520       public Builder addCell(
5521           int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) {
5522         if (cellBuilder_ == null) {
5523           ensureCellIsMutable();
5524           cell_.add(index, builderForValue.build());
5525           onChanged();
5526         } else {
5527           cellBuilder_.addMessage(index, builderForValue.build());
5528         }
5529         return this;
5530       }
5531       /**
5532        * <code>repeated .Cell cell = 1;</code>
5533        *
5534        * <pre>
5535        * Result includes the Cells or else it just has a count of Cells
5536        * that are carried otherwise.
5537        * </pre>
5538        */
5539       public Builder addAllCell(
5540           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> values) {
5541         if (cellBuilder_ == null) {
5542           ensureCellIsMutable();
5543           super.addAll(values, cell_);
5544           onChanged();
5545         } else {
5546           cellBuilder_.addAllMessages(values);
5547         }
5548         return this;
5549       }
5550       /**
5551        * <code>repeated .Cell cell = 1;</code>
5552        *
5553        * <pre>
5554        * Result includes the Cells or else it just has a count of Cells
5555        * that are carried otherwise.
5556        * </pre>
5557        */
5558       public Builder clearCell() {
5559         if (cellBuilder_ == null) {
5560           cell_ = java.util.Collections.emptyList();
5561           bitField0_ = (bitField0_ & ~0x00000001);
5562           onChanged();
5563         } else {
5564           cellBuilder_.clear();
5565         }
5566         return this;
5567       }
5568       /**
5569        * <code>repeated .Cell cell = 1;</code>
5570        *
5571        * <pre>
5572        * Result includes the Cells or else it just has a count of Cells
5573        * that are carried otherwise.
5574        * </pre>
5575        */
5576       public Builder removeCell(int index) {
5577         if (cellBuilder_ == null) {
5578           ensureCellIsMutable();
5579           cell_.remove(index);
5580           onChanged();
5581         } else {
5582           cellBuilder_.remove(index);
5583         }
5584         return this;
5585       }
5586       /**
5587        * <code>repeated .Cell cell = 1;</code>
5588        *
5589        * <pre>
5590        * Result includes the Cells or else it just has a count of Cells
5591        * that are carried otherwise.
5592        * </pre>
5593        */
5594       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder getCellBuilder(
5595           int index) {
5596         return getCellFieldBuilder().getBuilder(index);
5597       }
5598       /**
5599        * <code>repeated .Cell cell = 1;</code>
5600        *
5601        * <pre>
5602        * Result includes the Cells or else it just has a count of Cells
5603        * that are carried otherwise.
5604        * </pre>
5605        */
5606       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
5607           int index) {
5608         if (cellBuilder_ == null) {
5609           return cell_.get(index);  } else {
5610           return cellBuilder_.getMessageOrBuilder(index);
5611         }
5612       }
5613       /**
5614        * <code>repeated .Cell cell = 1;</code>
5615        *
5616        * <pre>
5617        * Result includes the Cells or else it just has a count of Cells
5618        * that are carried otherwise.
5619        * </pre>
5620        */
5621       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
5622            getCellOrBuilderList() {
5623         if (cellBuilder_ != null) {
5624           return cellBuilder_.getMessageOrBuilderList();
5625         } else {
5626           return java.util.Collections.unmodifiableList(cell_);
5627         }
5628       }
5629       /**
5630        * <code>repeated .Cell cell = 1;</code>
5631        *
5632        * <pre>
5633        * Result includes the Cells or else it just has a count of Cells
5634        * that are carried otherwise.
5635        * </pre>
5636        */
5637       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() {
5638         return getCellFieldBuilder().addBuilder(
5639             org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
5640       }
5641       /**
5642        * <code>repeated .Cell cell = 1;</code>
5643        *
5644        * <pre>
5645        * Result includes the Cells or else it just has a count of Cells
5646        * that are carried otherwise.
5647        * </pre>
5648        */
5649       public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder(
5650           int index) {
5651         return getCellFieldBuilder().addBuilder(
5652             index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance());
5653       }
5654       /**
5655        * <code>repeated .Cell cell = 1;</code>
5656        *
5657        * <pre>
5658        * Result includes the Cells or else it just has a count of Cells
5659        * that are carried otherwise.
5660        * </pre>
5661        */
5662       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder>
5663            getCellBuilderList() {
5664         return getCellFieldBuilder().getBuilderList();
5665       }
5666       private com.google.protobuf.RepeatedFieldBuilder<
5667           org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
5668           getCellFieldBuilder() {
5669         if (cellBuilder_ == null) {
5670           cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
5671               org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>(
5672                   cell_,
5673                   ((bitField0_ & 0x00000001) == 0x00000001),
5674                   getParentForChildren(),
5675                   isClean());
5676           cell_ = null;
5677         }
5678         return cellBuilder_;
5679       }
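      // Note (editorial): the repeated 'cell' field has two interchangeable representations
      // in this Builder. Until a *Builder view is requested, elements live in the plain
      // ArrayList 'cell_' guarded by bit 0x00000001; the first call to getCellFieldBuilder()
      // moves them into the RepeatedFieldBuilder and nulls out 'cell_', after which all of
      // the accessors above route through 'cellBuilder_'.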
5680 
5681       // optional int32 associated_cell_count = 2;
5682       private int associatedCellCount_ ;
5683       /**
5684        * <code>optional int32 associated_cell_count = 2;</code>
5685        *
5686        * <pre>
5687        * The below count is set when the associated cells are
5688        * not part of this protobuf message; they are passed alongside
5689        * and then this Message is just a placeholder with metadata.
5690        * The count is needed to know how many to peel off the block of Cells as
5691        * ours.  NOTE: This is different from the pb managed cell_count of the
5692        * 'cell' field above which is non-null when the cells are pb'd.
5693        * </pre>
5694        */
5695       public boolean hasAssociatedCellCount() {
5696         return ((bitField0_ & 0x00000002) == 0x00000002);
5697       }
5698       /**
5699        * <code>optional int32 associated_cell_count = 2;</code>
5700        *
5701        * <pre>
5702        * The below count is set when the associated cells are
5703        * not part of this protobuf message; they are passed alongside
5704        * and then this Message is just a placeholder with metadata.
5705        * The count is needed to know how many to peel off the block of Cells as
5706        * ours.  NOTE: This is different from the pb managed cell_count of the
5707        * 'cell' field above which is non-null when the cells are pb'd.
5708        * </pre>
5709        */
5710       public int getAssociatedCellCount() {
5711         return associatedCellCount_;
5712       }
5713       /**
5714        * <code>optional int32 associated_cell_count = 2;</code>
5715        *
5716        * <pre>
5717        * The below count is set when the associated cells are
5718        * not part of this protobuf message; they are passed alongside
5719        * and then this Message is just a placeholder with metadata.
5720        * The count is needed to know how many to peel off the block of Cells as
5721        * ours.  NOTE: This is different from the pb managed cell_count of the
5722        * 'cell' field above which is non-null when the cells are pb'd.
5723        * </pre>
5724        */
5725       public Builder setAssociatedCellCount(int value) {
5726         bitField0_ |= 0x00000002;
5727         associatedCellCount_ = value;
5728         onChanged();
5729         return this;
5730       }
5731       /**
5732        * <code>optional int32 associated_cell_count = 2;</code>
5733        *
5734        * <pre>
5735        * The below count is set when the associated cells are
5736        * not part of this protobuf message; they are passed alongside
5737        * and then this Message is just a placeholder with metadata.
5738        * The count is needed to know how many to peel off the block of Cells as
5739        * ours.  NOTE: This is different from the pb managed cell_count of the
5740        * 'cell' field above which is non-null when the cells are pb'd.
5741        * </pre>
5742        */
5743       public Builder clearAssociatedCellCount() {
5744         bitField0_ = (bitField0_ & ~0x00000002);
5745         associatedCellCount_ = 0;
5746         onChanged();
5747         return this;
5748       }
5749 
5750       // optional bool exists = 3;
5751       private boolean exists_ ;
5752       /**
5753        * <code>optional bool exists = 3;</code>
5754        *
5755        * <pre>
5756        * used for Get to check existence only. Not set if existence_only was not set to true
5757        *  in the query.
5758        * </pre>
5759        */
5760       public boolean hasExists() {
5761         return ((bitField0_ & 0x00000004) == 0x00000004);
5762       }
5763       /**
5764        * <code>optional bool exists = 3;</code>
5765        *
5766        * <pre>
5767        * used for Get to check existence only. Not set if existence_only was not set to true
5768        *  in the query.
5769        * </pre>
5770        */
5771       public boolean getExists() {
5772         return exists_;
5773       }
5774       /**
5775        * <code>optional bool exists = 3;</code>
5776        *
5777        * <pre>
5778        * used for Get to check existence only. Not set if existence_only was not set to true
5779        *  in the query.
5780        * </pre>
5781        */
5782       public Builder setExists(boolean value) {
5783         bitField0_ |= 0x00000004;
5784         exists_ = value;
5785         onChanged();
5786         return this;
5787       }
5788       /**
5789        * <code>optional bool exists = 3;</code>
5790        *
5791        * <pre>
5792        * used for Get to check existence only. Not set if existence_only was not set to true
5793        *  in the query.
5794        * </pre>
5795        */
5796       public Builder clearExists() {
5797         bitField0_ = (bitField0_ & ~0x00000004);
5798         exists_ = false;
5799         onChanged();
5800         return this;
5801       }
5802 
5803       // optional bool stale = 4 [default = false];
5804       private boolean stale_ ;
5805       /**
5806        * <code>optional bool stale = 4 [default = false];</code>
5807        *
5808        * <pre>
5809        * Whether or not the results are coming from possibly stale data
5810        * </pre>
5811        */
5812       public boolean hasStale() {
5813         return ((bitField0_ & 0x00000008) == 0x00000008);
5814       }
5815       /**
5816        * <code>optional bool stale = 4 [default = false];</code>
5817        *
5818        * <pre>
5819        * Whether or not the results are coming from possibly stale data
5820        * </pre>
5821        */
5822       public boolean getStale() {
5823         return stale_;
5824       }
5825       /**
5826        * <code>optional bool stale = 4 [default = false];</code>
5827        *
5828        * <pre>
5829        * Whether or not the results are coming from possibly stale data
5830        * </pre>
5831        */
5832       public Builder setStale(boolean value) {
5833         bitField0_ |= 0x00000008;
5834         stale_ = value;
5835         onChanged();
5836         return this;
5837       }
5838       /**
5839        * <code>optional bool stale = 4 [default = false];</code>
5840        *
5841        * <pre>
5842        * Whether or not the results are coming from possibly stale data
5843        * </pre>
5844        */
5845       public Builder clearStale() {
5846         bitField0_ = (bitField0_ & ~0x00000008);
5847         stale_ = false;
5848         onChanged();
5849         return this;
5850       }
5851 
5852       // optional bool partial = 5 [default = false];
5853       private boolean partial_ ;
5854       /**
5855        * <code>optional bool partial = 5 [default = false];</code>
5856        *
5857        * <pre>
5858        * Whether or not the entire result could be returned. Results will be split when
5859        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5860        * cells for a row and must be combined with a result containing the remaining cells
5861        * to form a complete result
5862        * </pre>
5863        */
5864       public boolean hasPartial() {
5865         return ((bitField0_ & 0x00000010) == 0x00000010);
5866       }
5867       /**
5868        * <code>optional bool partial = 5 [default = false];</code>
5869        *
5870        * <pre>
5871        * Whether or not the entire result could be returned. Results will be split when
5872        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5873        * cells for a row and must be combined with a result containing the remaining cells
5874        * to form a complete result
5875        * </pre>
5876        */
5877       public boolean getPartial() {
5878         return partial_;
5879       }
5880       /**
5881        * <code>optional bool partial = 5 [default = false];</code>
5882        *
5883        * <pre>
5884        * Whether or not the entire result could be returned. Results will be split when
5885        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5886        * cells for a row and must be combined with a result containing the remaining cells
5887        * to form a complete result
5888        * </pre>
5889        */
5890       public Builder setPartial(boolean value) {
5891         bitField0_ |= 0x00000010;
5892         partial_ = value;
5893         onChanged();
5894         return this;
5895       }
5896       /**
5897        * <code>optional bool partial = 5 [default = false];</code>
5898        *
5899        * <pre>
5900        * Whether or not the entire result could be returned. Results will be split when
5901        * the RPC chunk size limit is reached. Partial results contain only a subset of the
5902        * cells for a row and must be combined with a result containing the remaining cells
5903        * to form a complete result
5904        * </pre>
5905        */
5906       public Builder clearPartial() {
5907         bitField0_ = (bitField0_ & ~0x00000010);
5908         partial_ = false;
5909         onChanged();
5910         return this;
5911       }
5912 
5913       // @@protoc_insertion_point(builder_scope:Result)
5914     }
5915 
5916     static {
5917       defaultInstance = new Result(true);
5918       defaultInstance.initFields();
5919     }
5920 
5921     // @@protoc_insertion_point(class_scope:Result)
5922   }
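  // Illustrative sketch (hand-written, not part of the generated output): building a
  // Result through the Builder above and round-tripping it through its serialized form.
  // The CellProtos.Cell setters used here assume the field names declared in Cell.proto
  // (row/family/qualifier/value); adjust if your Cell.proto differs.
  private static Result buildAndReparseResultExample()
      throws com.google.protobuf.InvalidProtocolBufferException {
    Result result = Result.newBuilder()
        .addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.newBuilder()
            .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
            .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
            .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
            .setValue(com.google.protobuf.ByteString.copyFromUtf8("v")))
        .setStale(false)   // result did not come from a possibly-stale replica
        .build();
    // Serialize, then parse back through the static parseFrom(...) entry points above.
    return Result.parseFrom(result.toByteString());
  }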
5923 
5924   public interface GetRequestOrBuilder
5925       extends com.google.protobuf.MessageOrBuilder {
5926 
5927     // required .RegionSpecifier region = 1;
5928     /**
5929      * <code>required .RegionSpecifier region = 1;</code>
5930      */
5931     boolean hasRegion();
5932     /**
5933      * <code>required .RegionSpecifier region = 1;</code>
5934      */
5935     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
5936     /**
5937      * <code>required .RegionSpecifier region = 1;</code>
5938      */
5939     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
5940 
5941     // required .Get get = 2;
5942     /**
5943      * <code>required .Get get = 2;</code>
5944      */
5945     boolean hasGet();
5946     /**
5947      * <code>required .Get get = 2;</code>
5948      */
5949     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
5950     /**
5951      * <code>required .Get get = 2;</code>
5952      */
5953     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
5954   }
5955   /**
5956    * Protobuf type {@code GetRequest}
5957    *
5958    * <pre>
5959    **
5960    * The get request. Perform a single Get operation.
5961    * </pre>
5962    */
5963   public static final class GetRequest extends
5964       com.google.protobuf.GeneratedMessage
5965       implements GetRequestOrBuilder {
5966     // Use GetRequest.newBuilder() to construct.
5967     private GetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
5968       super(builder);
5969       this.unknownFields = builder.getUnknownFields();
5970     }
5971     private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5972 
5973     private static final GetRequest defaultInstance;
5974     public static GetRequest getDefaultInstance() {
5975       return defaultInstance;
5976     }
5977 
5978     public GetRequest getDefaultInstanceForType() {
5979       return defaultInstance;
5980     }
5981 
5982     private final com.google.protobuf.UnknownFieldSet unknownFields;
5983     @java.lang.Override
5984     public final com.google.protobuf.UnknownFieldSet
5985         getUnknownFields() {
5986       return this.unknownFields;
5987     }
5988     private GetRequest(
5989         com.google.protobuf.CodedInputStream input,
5990         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
5991         throws com.google.protobuf.InvalidProtocolBufferException {
5992       initFields();
5993       int mutable_bitField0_ = 0;
5994       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
5995           com.google.protobuf.UnknownFieldSet.newBuilder();
5996       try {
5997         boolean done = false;
5998         while (!done) {
5999           int tag = input.readTag();
6000           switch (tag) {
6001             case 0:
6002               done = true;
6003               break;
6004             default: {
6005               if (!parseUnknownField(input, unknownFields,
6006                                      extensionRegistry, tag)) {
6007                 done = true;
6008               }
6009               break;
6010             }
6011             case 10: {
6012               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
6013               if (((bitField0_ & 0x00000001) == 0x00000001)) {
6014                 subBuilder = region_.toBuilder();
6015               }
6016               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
6017               if (subBuilder != null) {
6018                 subBuilder.mergeFrom(region_);
6019                 region_ = subBuilder.buildPartial();
6020               }
6021               bitField0_ |= 0x00000001;
6022               break;
6023             }
6024             case 18: {
6025               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
6026               if (((bitField0_ & 0x00000002) == 0x00000002)) {
6027                 subBuilder = get_.toBuilder();
6028               }
6029               get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
6030               if (subBuilder != null) {
6031                 subBuilder.mergeFrom(get_);
6032                 get_ = subBuilder.buildPartial();
6033               }
6034               bitField0_ |= 0x00000002;
6035               break;
6036             }
6037           }
6038         }
6039       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6040         throw e.setUnfinishedMessage(this);
6041       } catch (java.io.IOException e) {
6042         throw new com.google.protobuf.InvalidProtocolBufferException(
6043             e.getMessage()).setUnfinishedMessage(this);
6044       } finally {
6045         this.unknownFields = unknownFields.build();
6046         makeExtensionsImmutable();
6047       }
6048     }
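    // Note (editorial): in the parse loop above each tag is (field_number << 3) | wire_type,
    // so case 10 is field 1 ('region', wire type 2 = length-delimited) and case 18 is
    // field 2 ('get'). Anything else falls through to parseUnknownField(...), which keeps
    // the data in unknownFields instead of failing, preserving forward compatibility.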
6049     public static final com.google.protobuf.Descriptors.Descriptor
6050         getDescriptor() {
6051       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
6052     }
6053 
6054     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6055         internalGetFieldAccessorTable() {
6056       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable
6057           .ensureFieldAccessorsInitialized(
6058               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
6059     }
6060 
6061     public static com.google.protobuf.Parser<GetRequest> PARSER =
6062         new com.google.protobuf.AbstractParser<GetRequest>() {
6063       public GetRequest parsePartialFrom(
6064           com.google.protobuf.CodedInputStream input,
6065           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6066           throws com.google.protobuf.InvalidProtocolBufferException {
6067         return new GetRequest(input, extensionRegistry);
6068       }
6069     };
6070 
6071     @java.lang.Override
6072     public com.google.protobuf.Parser<GetRequest> getParserForType() {
6073       return PARSER;
6074     }
6075 
6076     private int bitField0_;
6077     // required .RegionSpecifier region = 1;
6078     public static final int REGION_FIELD_NUMBER = 1;
6079     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
6080     /**
6081      * <code>required .RegionSpecifier region = 1;</code>
6082      */
6083     public boolean hasRegion() {
6084       return ((bitField0_ & 0x00000001) == 0x00000001);
6085     }
6086     /**
6087      * <code>required .RegionSpecifier region = 1;</code>
6088      */
6089     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
6090       return region_;
6091     }
6092     /**
6093      * <code>required .RegionSpecifier region = 1;</code>
6094      */
6095     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
6096       return region_;
6097     }
6098 
6099     // required .Get get = 2;
6100     public static final int GET_FIELD_NUMBER = 2;
6101     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
6102     /**
6103      * <code>required .Get get = 2;</code>
6104      */
6105     public boolean hasGet() {
6106       return ((bitField0_ & 0x00000002) == 0x00000002);
6107     }
6108     /**
6109      * <code>required .Get get = 2;</code>
6110      */
6111     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
6112       return get_;
6113     }
6114     /**
6115      * <code>required .Get get = 2;</code>
6116      */
6117     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
6118       return get_;
6119     }
6120 
6121     private void initFields() {
6122       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6123       get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
6124     }
6125     private byte memoizedIsInitialized = -1;
6126     public final boolean isInitialized() {
6127       byte isInitialized = memoizedIsInitialized;
6128       if (isInitialized != -1) return isInitialized == 1;
6129 
6130       if (!hasRegion()) {
6131         memoizedIsInitialized = 0;
6132         return false;
6133       }
6134       if (!hasGet()) {
6135         memoizedIsInitialized = 0;
6136         return false;
6137       }
6138       if (!getRegion().isInitialized()) {
6139         memoizedIsInitialized = 0;
6140         return false;
6141       }
6142       if (!getGet().isInitialized()) {
6143         memoizedIsInitialized = 0;
6144         return false;
6145       }
6146       memoizedIsInitialized = 1;
6147       return true;
6148     }
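    // Illustrative sketch (hand-written, not part of the generated output): because both
    // 'region' and 'get' are required, build() only succeeds once each is set and is itself
    // initialized; otherwise it throws an UninitializedMessageException. The RegionSpecifier
    // and Get setters used here assume the field names declared in HBase.proto/Client.proto.
    private static GetRequest newGetRequestExample(byte[] regionName, byte[] row) {
      return GetRequest.newBuilder()
          .setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
              .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
              .setValue(com.google.protobuf.ByteString.copyFrom(regionName)))
          .setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder()
              .setRow(com.google.protobuf.ByteString.copyFrom(row)))
          .build();
    }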
6149 
6150     public void writeTo(com.google.protobuf.CodedOutputStream output)
6151                         throws java.io.IOException {
6152       getSerializedSize();
6153       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6154         output.writeMessage(1, region_);
6155       }
6156       if (((bitField0_ & 0x00000002) == 0x00000002)) {
6157         output.writeMessage(2, get_);
6158       }
6159       getUnknownFields().writeTo(output);
6160     }
6161 
6162     private int memoizedSerializedSize = -1;
6163     public int getSerializedSize() {
6164       int size = memoizedSerializedSize;
6165       if (size != -1) return size;
6166 
6167       size = 0;
6168       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6169         size += com.google.protobuf.CodedOutputStream
6170           .computeMessageSize(1, region_);
6171       }
6172       if (((bitField0_ & 0x00000002) == 0x00000002)) {
6173         size += com.google.protobuf.CodedOutputStream
6174           .computeMessageSize(2, get_);
6175       }
6176       size += getUnknownFields().getSerializedSize();
6177       memoizedSerializedSize = size;
6178       return size;
6179     }
6180 
6181     private static final long serialVersionUID = 0L;
6182     @java.lang.Override
6183     protected java.lang.Object writeReplace()
6184         throws java.io.ObjectStreamException {
6185       return super.writeReplace();
6186     }
6187 
6188     @java.lang.Override
6189     public boolean equals(final java.lang.Object obj) {
6190       if (obj == this) {
6191        return true;
6192       }
6193       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) {
6194         return super.equals(obj);
6195       }
6196       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj;
6197 
6198       boolean result = true;
6199       result = result && (hasRegion() == other.hasRegion());
6200       if (hasRegion()) {
6201         result = result && getRegion()
6202             .equals(other.getRegion());
6203       }
6204       result = result && (hasGet() == other.hasGet());
6205       if (hasGet()) {
6206         result = result && getGet()
6207             .equals(other.getGet());
6208       }
6209       result = result &&
6210           getUnknownFields().equals(other.getUnknownFields());
6211       return result;
6212     }
6213 
6214     private int memoizedHashCode = 0;
6215     @java.lang.Override
6216     public int hashCode() {
6217       if (memoizedHashCode != 0) {
6218         return memoizedHashCode;
6219       }
6220       int hash = 41;
6221       hash = (19 * hash) + getDescriptorForType().hashCode();
6222       if (hasRegion()) {
6223         hash = (37 * hash) + REGION_FIELD_NUMBER;
6224         hash = (53 * hash) + getRegion().hashCode();
6225       }
6226       if (hasGet()) {
6227         hash = (37 * hash) + GET_FIELD_NUMBER;
6228         hash = (53 * hash) + getGet().hashCode();
6229       }
6230       hash = (29 * hash) + getUnknownFields().hashCode();
6231       memoizedHashCode = hash;
6232       return hash;
6233     }
6234 
6235     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
6236         com.google.protobuf.ByteString data)
6237         throws com.google.protobuf.InvalidProtocolBufferException {
6238       return PARSER.parseFrom(data);
6239     }
6240     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
6241         com.google.protobuf.ByteString data,
6242         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6243         throws com.google.protobuf.InvalidProtocolBufferException {
6244       return PARSER.parseFrom(data, extensionRegistry);
6245     }
6246     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data)
6247         throws com.google.protobuf.InvalidProtocolBufferException {
6248       return PARSER.parseFrom(data);
6249     }
6250     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
6251         byte[] data,
6252         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6253         throws com.google.protobuf.InvalidProtocolBufferException {
6254       return PARSER.parseFrom(data, extensionRegistry);
6255     }
6256     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input)
6257         throws java.io.IOException {
6258       return PARSER.parseFrom(input);
6259     }
6260     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
6261         java.io.InputStream input,
6262         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6263         throws java.io.IOException {
6264       return PARSER.parseFrom(input, extensionRegistry);
6265     }
6266     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input)
6267         throws java.io.IOException {
6268       return PARSER.parseDelimitedFrom(input);
6269     }
6270     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(
6271         java.io.InputStream input,
6272         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6273         throws java.io.IOException {
6274       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6275     }
6276     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
6277         com.google.protobuf.CodedInputStream input)
6278         throws java.io.IOException {
6279       return PARSER.parseFrom(input);
6280     }
6281     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
6282         com.google.protobuf.CodedInputStream input,
6283         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6284         throws java.io.IOException {
6285       return PARSER.parseFrom(input, extensionRegistry);
6286     }
6287 
6288     public static Builder newBuilder() { return Builder.create(); }
6289     public Builder newBuilderForType() { return newBuilder(); }
6290     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) {
6291       return newBuilder().mergeFrom(prototype);
6292     }
6293     public Builder toBuilder() { return newBuilder(this); }
6294 
6295     @java.lang.Override
6296     protected Builder newBuilderForType(
6297         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6298       Builder builder = new Builder(parent);
6299       return builder;
6300     }
6301     /**
6302      * Protobuf type {@code GetRequest}
6303      *
6304      * <pre>
6305      **
6306      * The get request. Perform a single Get operation.
6307      * </pre>
6308      */
6309     public static final class Builder extends
6310         com.google.protobuf.GeneratedMessage.Builder<Builder>
6311        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder {
6312       public static final com.google.protobuf.Descriptors.Descriptor
6313           getDescriptor() {
6314         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
6315       }
6316 
6317       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6318           internalGetFieldAccessorTable() {
6319         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable
6320             .ensureFieldAccessorsInitialized(
6321                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
6322       }
6323 
6324       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder()
6325       private Builder() {
6326         maybeForceBuilderInitialization();
6327       }
6328 
6329       private Builder(
6330           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6331         super(parent);
6332         maybeForceBuilderInitialization();
6333       }
6334       private void maybeForceBuilderInitialization() {
6335         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
6336           getRegionFieldBuilder();
6337           getGetFieldBuilder();
6338         }
6339       }
6340       private static Builder create() {
6341         return new Builder();
6342       }
6343 
6344       public Builder clear() {
6345         super.clear();
6346         if (regionBuilder_ == null) {
6347           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6348         } else {
6349           regionBuilder_.clear();
6350         }
6351         bitField0_ = (bitField0_ & ~0x00000001);
6352         if (getBuilder_ == null) {
6353           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
6354         } else {
6355           getBuilder_.clear();
6356         }
6357         bitField0_ = (bitField0_ & ~0x00000002);
6358         return this;
6359       }
6360 
6361       public Builder clone() {
6362         return create().mergeFrom(buildPartial());
6363       }
6364 
6365       public com.google.protobuf.Descriptors.Descriptor
6366           getDescriptorForType() {
6367         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
6368       }
6369 
6370       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
6371         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
6372       }
6373 
6374       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() {
6375         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial();
6376         if (!result.isInitialized()) {
6377           throw newUninitializedMessageException(result);
6378         }
6379         return result;
6380       }
6381 
6382       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() {
6383         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this);
6384         int from_bitField0_ = bitField0_;
6385         int to_bitField0_ = 0;
6386         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
6387           to_bitField0_ |= 0x00000001;
6388         }
6389         if (regionBuilder_ == null) {
6390           result.region_ = region_;
6391         } else {
6392           result.region_ = regionBuilder_.build();
6393         }
6394         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
6395           to_bitField0_ |= 0x00000002;
6396         }
6397         if (getBuilder_ == null) {
6398           result.get_ = get_;
6399         } else {
6400           result.get_ = getBuilder_.build();
6401         }
6402         result.bitField0_ = to_bitField0_;
6403         onBuilt();
6404         return result;
6405       }
6406 
6407       public Builder mergeFrom(com.google.protobuf.Message other) {
6408         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) {
6409           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other);
6410         } else {
6411           super.mergeFrom(other);
6412           return this;
6413         }
6414       }
6415 
6416       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) {
6417         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this;
6418         if (other.hasRegion()) {
6419           mergeRegion(other.getRegion());
6420         }
6421         if (other.hasGet()) {
6422           mergeGet(other.getGet());
6423         }
6424         this.mergeUnknownFields(other.getUnknownFields());
6425         return this;
6426       }
6427 
6428       public final boolean isInitialized() {
6429         if (!hasRegion()) {
6430 
6431           return false;
6432         }
6433         if (!hasGet()) {
6434 
6435           return false;
6436         }
6437         if (!getRegion().isInitialized()) {
6438 
6439           return false;
6440         }
6441         if (!getGet().isInitialized()) {
6442 
6443           return false;
6444         }
6445         return true;
6446       }
6447 
6448       public Builder mergeFrom(
6449           com.google.protobuf.CodedInputStream input,
6450           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6451           throws java.io.IOException {
6452         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parsedMessage = null;
6453         try {
6454           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
6455         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6456           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage();
6457           throw e;
6458         } finally {
6459           if (parsedMessage != null) {
6460             mergeFrom(parsedMessage);
6461           }
6462         }
6463         return this;
6464       }
6465       private int bitField0_;
6466 
6467       // required .RegionSpecifier region = 1;
6468       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6469       private com.google.protobuf.SingleFieldBuilder<
6470           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
6471       /**
6472        * <code>required .RegionSpecifier region = 1;</code>
6473        */
6474       public boolean hasRegion() {
6475         return ((bitField0_ & 0x00000001) == 0x00000001);
6476       }
6477       /**
6478        * <code>required .RegionSpecifier region = 1;</code>
6479        */
6480       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
6481         if (regionBuilder_ == null) {
6482           return region_;
6483         } else {
6484           return regionBuilder_.getMessage();
6485         }
6486       }
6487       /**
6488        * <code>required .RegionSpecifier region = 1;</code>
6489        */
6490       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
6491         if (regionBuilder_ == null) {
6492           if (value == null) {
6493             throw new NullPointerException();
6494           }
6495           region_ = value;
6496           onChanged();
6497         } else {
6498           regionBuilder_.setMessage(value);
6499         }
6500         bitField0_ |= 0x00000001;
6501         return this;
6502       }
6503       /**
6504        * <code>required .RegionSpecifier region = 1;</code>
6505        */
6506       public Builder setRegion(
6507           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
6508         if (regionBuilder_ == null) {
6509           region_ = builderForValue.build();
6510           onChanged();
6511         } else {
6512           regionBuilder_.setMessage(builderForValue.build());
6513         }
6514         bitField0_ |= 0x00000001;
6515         return this;
6516       }
6517       /**
6518        * <code>required .RegionSpecifier region = 1;</code>
6519        */
6520       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
6521         if (regionBuilder_ == null) {
6522           if (((bitField0_ & 0x00000001) == 0x00000001) &&
6523               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
6524             region_ =
6525               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
6526           } else {
6527             region_ = value;
6528           }
6529           onChanged();
6530         } else {
6531           regionBuilder_.mergeFrom(value);
6532         }
6533         bitField0_ |= 0x00000001;
6534         return this;
6535       }
6536       /**
6537        * <code>required .RegionSpecifier region = 1;</code>
6538        */
6539       public Builder clearRegion() {
6540         if (regionBuilder_ == null) {
6541           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
6542           onChanged();
6543         } else {
6544           regionBuilder_.clear();
6545         }
6546         bitField0_ = (bitField0_ & ~0x00000001);
6547         return this;
6548       }
6549       /**
6550        * <code>required .RegionSpecifier region = 1;</code>
6551        */
6552       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
6553         bitField0_ |= 0x00000001;
6554         onChanged();
6555         return getRegionFieldBuilder().getBuilder();
6556       }
6557       /**
6558        * <code>required .RegionSpecifier region = 1;</code>
6559        */
6560       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
6561         if (regionBuilder_ != null) {
6562           return regionBuilder_.getMessageOrBuilder();
6563         } else {
6564           return region_;
6565         }
6566       }
6567       /**
6568        * <code>required .RegionSpecifier region = 1;</code>
6569        */
6570       private com.google.protobuf.SingleFieldBuilder<
6571           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
6572           getRegionFieldBuilder() {
6573         if (regionBuilder_ == null) {
6574           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6575               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
6576                   region_,
6577                   getParentForChildren(),
6578                   isClean());
6579           region_ = null;
6580         }
6581         return regionBuilder_;
6582       }
6583 
6584       // required .Get get = 2;
6585       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
6586       private com.google.protobuf.SingleFieldBuilder<
6587           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
6588       /**
6589        * <code>required .Get get = 2;</code>
6590        */
6591       public boolean hasGet() {
6592         return ((bitField0_ & 0x00000002) == 0x00000002);
6593       }
6594       /**
6595        * <code>required .Get get = 2;</code>
6596        */
6597       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
6598         if (getBuilder_ == null) {
6599           return get_;
6600         } else {
6601           return getBuilder_.getMessage();
6602         }
6603       }
6604       /**
6605        * <code>required .Get get = 2;</code>
6606        */
6607       public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
6608         if (getBuilder_ == null) {
6609           if (value == null) {
6610             throw new NullPointerException();
6611           }
6612           get_ = value;
6613           onChanged();
6614         } else {
6615           getBuilder_.setMessage(value);
6616         }
6617         bitField0_ |= 0x00000002;
6618         return this;
6619       }
6620       /**
6621        * <code>required .Get get = 2;</code>
6622        */
6623       public Builder setGet(
6624           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
6625         if (getBuilder_ == null) {
6626           get_ = builderForValue.build();
6627           onChanged();
6628         } else {
6629           getBuilder_.setMessage(builderForValue.build());
6630         }
6631         bitField0_ |= 0x00000002;
6632         return this;
6633       }
6634       /**
6635        * <code>required .Get get = 2;</code>
6636        */
6637       public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
6638         if (getBuilder_ == null) {
6639           if (((bitField0_ & 0x00000002) == 0x00000002) &&
6640               get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
6641             get_ =
6642               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
6643           } else {
6644             get_ = value;
6645           }
6646           onChanged();
6647         } else {
6648           getBuilder_.mergeFrom(value);
6649         }
6650         bitField0_ |= 0x00000002;
6651         return this;
6652       }
6653       /**
6654        * <code>required .Get get = 2;</code>
6655        */
6656       public Builder clearGet() {
6657         if (getBuilder_ == null) {
6658           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
6659           onChanged();
6660         } else {
6661           getBuilder_.clear();
6662         }
6663         bitField0_ = (bitField0_ & ~0x00000002);
6664         return this;
6665       }
6666       /**
6667        * <code>required .Get get = 2;</code>
6668        */
6669       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
6670         bitField0_ |= 0x00000002;
6671         onChanged();
6672         return getGetFieldBuilder().getBuilder();
6673       }
6674       /**
6675        * <code>required .Get get = 2;</code>
6676        */
6677       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
6678         if (getBuilder_ != null) {
6679           return getBuilder_.getMessageOrBuilder();
6680         } else {
6681           return get_;
6682         }
6683       }
6684       /**
6685        * <code>required .Get get = 2;</code>
6686        */
6687       private com.google.protobuf.SingleFieldBuilder<
6688           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>
6689           getGetFieldBuilder() {
6690         if (getBuilder_ == null) {
6691           getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
6692               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
6693                   get_,
6694                   getParentForChildren(),
6695                   isClean());
6696           get_ = null;
6697         }
6698         return getBuilder_;
6699       }
6700 
6701       // @@protoc_insertion_point(builder_scope:GetRequest)
6702     }
6703 
6704     static {
6705       defaultInstance = new GetRequest(true);
6706       defaultInstance.initFields();
6707     }
6708 
6709     // @@protoc_insertion_point(class_scope:GetRequest)
6710   }
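  /*
   * Editorial note: an illustrative usage sketch, not part of the generated file.
   * Client code (for example HBase's request-building helpers) would typically
   * assemble a GetRequest through the builder methods above. The row bytes, the
   * region name, and the RegionSpecifierType enum constant are placeholder
   * assumptions drawn from the companion HBaseProtos definitions.
   *
   *   import com.google.protobuf.ByteString;
   *   import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
   *
   *   RegionSpecifier region = RegionSpecifier.newBuilder()
   *       .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME) // assumed enum constant
   *       .setValue(ByteString.copyFromUtf8("exampleRegionName"))
   *       .build();
   *   ClientProtos.Get get = ClientProtos.Get.newBuilder()
   *       .setRow(ByteString.copyFromUtf8("exampleRow"))
   *       .build();
   *   ClientProtos.GetRequest request = ClientProtos.GetRequest.newBuilder()
   *       .setRegion(region) // required field 1
   *       .setGet(get)       // required field 2
   *       .build();          // build() throws if either required field is unset
   */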
6711 
6712   public interface GetResponseOrBuilder
6713       extends com.google.protobuf.MessageOrBuilder {
6714 
6715     // optional .Result result = 1;
6716     /**
6717      * <code>optional .Result result = 1;</code>
6718      */
6719     boolean hasResult();
6720     /**
6721      * <code>optional .Result result = 1;</code>
6722      */
6723     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
6724     /**
6725      * <code>optional .Result result = 1;</code>
6726      */
6727     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
6728   }
6729   /**
6730    * Protobuf type {@code GetResponse}
6731    */
6732   public static final class GetResponse extends
6733       com.google.protobuf.GeneratedMessage
6734       implements GetResponseOrBuilder {
6735     // Use GetResponse.newBuilder() to construct.
6736     private GetResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
6737       super(builder);
6738       this.unknownFields = builder.getUnknownFields();
6739     }
6740     private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
6741 
6742     private static final GetResponse defaultInstance;
6743     public static GetResponse getDefaultInstance() {
6744       return defaultInstance;
6745     }
6746 
6747     public GetResponse getDefaultInstanceForType() {
6748       return defaultInstance;
6749     }
6750 
6751     private final com.google.protobuf.UnknownFieldSet unknownFields;
6752     @java.lang.Override
6753     public final com.google.protobuf.UnknownFieldSet
6754         getUnknownFields() {
6755       return this.unknownFields;
6756     }
6757     private GetResponse(
6758         com.google.protobuf.CodedInputStream input,
6759         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6760         throws com.google.protobuf.InvalidProtocolBufferException {
6761       initFields();
6762       int mutable_bitField0_ = 0;
6763       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
6764           com.google.protobuf.UnknownFieldSet.newBuilder();
6765       try {
6766         boolean done = false;
6767         while (!done) {
6768           int tag = input.readTag();
6769           switch (tag) {
6770             case 0:
6771               done = true;
6772               break;
6773             default: {
6774               if (!parseUnknownField(input, unknownFields,
6775                                      extensionRegistry, tag)) {
6776                 done = true;
6777               }
6778               break;
6779             }
6780             case 10: {
6781               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
6782               if (((bitField0_ & 0x00000001) == 0x00000001)) {
6783                 subBuilder = result_.toBuilder();
6784               }
6785               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
6786               if (subBuilder != null) {
6787                 subBuilder.mergeFrom(result_);
6788                 result_ = subBuilder.buildPartial();
6789               }
6790               bitField0_ |= 0x00000001;
6791               break;
6792             }
6793           }
6794         }
6795       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
6796         throw e.setUnfinishedMessage(this);
6797       } catch (java.io.IOException e) {
6798         throw new com.google.protobuf.InvalidProtocolBufferException(
6799             e.getMessage()).setUnfinishedMessage(this);
6800       } finally {
6801         this.unknownFields = unknownFields.build();
6802         makeExtensionsImmutable();
6803       }
6804     }
6805     public static final com.google.protobuf.Descriptors.Descriptor
6806         getDescriptor() {
6807       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
6808     }
6809 
6810     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
6811         internalGetFieldAccessorTable() {
6812       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable
6813           .ensureFieldAccessorsInitialized(
6814               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
6815     }
6816 
6817     public static com.google.protobuf.Parser<GetResponse> PARSER =
6818         new com.google.protobuf.AbstractParser<GetResponse>() {
6819       public GetResponse parsePartialFrom(
6820           com.google.protobuf.CodedInputStream input,
6821           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6822           throws com.google.protobuf.InvalidProtocolBufferException {
6823         return new GetResponse(input, extensionRegistry);
6824       }
6825     };
6826 
6827     @java.lang.Override
6828     public com.google.protobuf.Parser<GetResponse> getParserForType() {
6829       return PARSER;
6830     }
6831 
6832     private int bitField0_;
6833     // optional .Result result = 1;
6834     public static final int RESULT_FIELD_NUMBER = 1;
6835     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
6836     /**
6837      * <code>optional .Result result = 1;</code>
6838      */
6839     public boolean hasResult() {
6840       return ((bitField0_ & 0x00000001) == 0x00000001);
6841     }
6842     /**
6843      * <code>optional .Result result = 1;</code>
6844      */
6845     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
6846       return result_;
6847     }
6848     /**
6849      * <code>optional .Result result = 1;</code>
6850      */
6851     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
6852       return result_;
6853     }
6854 
6855     private void initFields() {
6856       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
6857     }
6858     private byte memoizedIsInitialized = -1;
6859     public final boolean isInitialized() {
6860       byte isInitialized = memoizedIsInitialized;
6861       if (isInitialized != -1) return isInitialized == 1;
6862 
6863       memoizedIsInitialized = 1;
6864       return true;
6865     }
6866 
6867     public void writeTo(com.google.protobuf.CodedOutputStream output)
6868                         throws java.io.IOException {
6869       getSerializedSize();
6870       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6871         output.writeMessage(1, result_);
6872       }
6873       getUnknownFields().writeTo(output);
6874     }
6875 
6876     private int memoizedSerializedSize = -1;
6877     public int getSerializedSize() {
6878       int size = memoizedSerializedSize;
6879       if (size != -1) return size;
6880 
6881       size = 0;
6882       if (((bitField0_ & 0x00000001) == 0x00000001)) {
6883         size += com.google.protobuf.CodedOutputStream
6884           .computeMessageSize(1, result_);
6885       }
6886       size += getUnknownFields().getSerializedSize();
6887       memoizedSerializedSize = size;
6888       return size;
6889     }
6890 
6891     private static final long serialVersionUID = 0L;
6892     @java.lang.Override
6893     protected java.lang.Object writeReplace()
6894         throws java.io.ObjectStreamException {
6895       return super.writeReplace();
6896     }
6897 
6898     @java.lang.Override
6899     public boolean equals(final java.lang.Object obj) {
6900       if (obj == this) {
6901        return true;
6902       }
6903       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) {
6904         return super.equals(obj);
6905       }
6906       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj;
6907 
6908       boolean result = true;
6909       result = result && (hasResult() == other.hasResult());
6910       if (hasResult()) {
6911         result = result && getResult()
6912             .equals(other.getResult());
6913       }
6914       result = result &&
6915           getUnknownFields().equals(other.getUnknownFields());
6916       return result;
6917     }
6918 
6919     private int memoizedHashCode = 0;
6920     @java.lang.Override
6921     public int hashCode() {
6922       if (memoizedHashCode != 0) {
6923         return memoizedHashCode;
6924       }
6925       int hash = 41;
6926       hash = (19 * hash) + getDescriptorForType().hashCode();
6927       if (hasResult()) {
6928         hash = (37 * hash) + RESULT_FIELD_NUMBER;
6929         hash = (53 * hash) + getResult().hashCode();
6930       }
6931       hash = (29 * hash) + getUnknownFields().hashCode();
6932       memoizedHashCode = hash;
6933       return hash;
6934     }
6935 
6936     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6937         com.google.protobuf.ByteString data)
6938         throws com.google.protobuf.InvalidProtocolBufferException {
6939       return PARSER.parseFrom(data);
6940     }
6941     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6942         com.google.protobuf.ByteString data,
6943         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6944         throws com.google.protobuf.InvalidProtocolBufferException {
6945       return PARSER.parseFrom(data, extensionRegistry);
6946     }
6947     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data)
6948         throws com.google.protobuf.InvalidProtocolBufferException {
6949       return PARSER.parseFrom(data);
6950     }
6951     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6952         byte[] data,
6953         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6954         throws com.google.protobuf.InvalidProtocolBufferException {
6955       return PARSER.parseFrom(data, extensionRegistry);
6956     }
6957     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input)
6958         throws java.io.IOException {
6959       return PARSER.parseFrom(input);
6960     }
6961     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6962         java.io.InputStream input,
6963         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6964         throws java.io.IOException {
6965       return PARSER.parseFrom(input, extensionRegistry);
6966     }
6967     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input)
6968         throws java.io.IOException {
6969       return PARSER.parseDelimitedFrom(input);
6970     }
6971     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(
6972         java.io.InputStream input,
6973         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6974         throws java.io.IOException {
6975       return PARSER.parseDelimitedFrom(input, extensionRegistry);
6976     }
6977     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6978         com.google.protobuf.CodedInputStream input)
6979         throws java.io.IOException {
6980       return PARSER.parseFrom(input);
6981     }
6982     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
6983         com.google.protobuf.CodedInputStream input,
6984         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
6985         throws java.io.IOException {
6986       return PARSER.parseFrom(input, extensionRegistry);
6987     }
6988 
6989     public static Builder newBuilder() { return Builder.create(); }
6990     public Builder newBuilderForType() { return newBuilder(); }
6991     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) {
6992       return newBuilder().mergeFrom(prototype);
6993     }
6994     public Builder toBuilder() { return newBuilder(this); }
6995 
6996     @java.lang.Override
6997     protected Builder newBuilderForType(
6998         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
6999       Builder builder = new Builder(parent);
7000       return builder;
7001     }
7002     /**
7003      * Protobuf type {@code GetResponse}
7004      */
7005     public static final class Builder extends
7006         com.google.protobuf.GeneratedMessage.Builder<Builder>
7007        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder {
7008       public static final com.google.protobuf.Descriptors.Descriptor
7009           getDescriptor() {
7010         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
7011       }
7012 
7013       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7014           internalGetFieldAccessorTable() {
7015         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable
7016             .ensureFieldAccessorsInitialized(
7017                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
7018       }
7019 
7020       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder()
7021       private Builder() {
7022         maybeForceBuilderInitialization();
7023       }
7024 
7025       private Builder(
7026           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7027         super(parent);
7028         maybeForceBuilderInitialization();
7029       }
7030       private void maybeForceBuilderInitialization() {
7031         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7032           getResultFieldBuilder();
7033         }
7034       }
7035       private static Builder create() {
7036         return new Builder();
7037       }
7038 
7039       public Builder clear() {
7040         super.clear();
7041         if (resultBuilder_ == null) {
7042           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
7043         } else {
7044           resultBuilder_.clear();
7045         }
7046         bitField0_ = (bitField0_ & ~0x00000001);
7047         return this;
7048       }
7049 
7050       public Builder clone() {
7051         return create().mergeFrom(buildPartial());
7052       }
7053 
7054       public com.google.protobuf.Descriptors.Descriptor
7055           getDescriptorForType() {
7056         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
7057       }
7058 
7059       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() {
7060         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
7061       }
7062 
7063       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() {
7064         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial();
7065         if (!result.isInitialized()) {
7066           throw newUninitializedMessageException(result);
7067         }
7068         return result;
7069       }
7070 
7071       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() {
7072         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this);
7073         int from_bitField0_ = bitField0_;
7074         int to_bitField0_ = 0;
7075         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
7076           to_bitField0_ |= 0x00000001;
7077         }
7078         if (resultBuilder_ == null) {
7079           result.result_ = result_;
7080         } else {
7081           result.result_ = resultBuilder_.build();
7082         }
7083         result.bitField0_ = to_bitField0_;
7084         onBuilt();
7085         return result;
7086       }
7087 
7088       public Builder mergeFrom(com.google.protobuf.Message other) {
7089         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) {
7090           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other);
7091         } else {
7092           super.mergeFrom(other);
7093           return this;
7094         }
7095       }
7096 
7097       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) {
7098         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this;
7099         if (other.hasResult()) {
7100           mergeResult(other.getResult());
7101         }
7102         this.mergeUnknownFields(other.getUnknownFields());
7103         return this;
7104       }
7105 
7106       public final boolean isInitialized() {
7107         return true;
7108       }
7109 
7110       public Builder mergeFrom(
7111           com.google.protobuf.CodedInputStream input,
7112           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7113           throws java.io.IOException {
7114         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parsedMessage = null;
7115         try {
7116           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7117         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7118           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage();
7119           throw e;
7120         } finally {
7121           if (parsedMessage != null) {
7122             mergeFrom(parsedMessage);
7123           }
7124         }
7125         return this;
7126       }
7127       private int bitField0_;
7128 
7129       // optional .Result result = 1;
7130       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
7131       private com.google.protobuf.SingleFieldBuilder<
7132           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
7133       /**
7134        * <code>optional .Result result = 1;</code>
7135        */
7136       public boolean hasResult() {
7137         return ((bitField0_ & 0x00000001) == 0x00000001);
7138       }
7139       /**
7140        * <code>optional .Result result = 1;</code>
7141        */
7142       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
7143         if (resultBuilder_ == null) {
7144           return result_;
7145         } else {
7146           return resultBuilder_.getMessage();
7147         }
7148       }
7149       /**
7150        * <code>optional .Result result = 1;</code>
7151        */
7152       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
7153         if (resultBuilder_ == null) {
7154           if (value == null) {
7155             throw new NullPointerException();
7156           }
7157           result_ = value;
7158           onChanged();
7159         } else {
7160           resultBuilder_.setMessage(value);
7161         }
7162         bitField0_ |= 0x00000001;
7163         return this;
7164       }
7165       /**
7166        * <code>optional .Result result = 1;</code>
7167        */
7168       public Builder setResult(
7169           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
7170         if (resultBuilder_ == null) {
7171           result_ = builderForValue.build();
7172           onChanged();
7173         } else {
7174           resultBuilder_.setMessage(builderForValue.build());
7175         }
7176         bitField0_ |= 0x00000001;
7177         return this;
7178       }
7179       /**
7180        * <code>optional .Result result = 1;</code>
7181        */
7182       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
7183         if (resultBuilder_ == null) {
7184           if (((bitField0_ & 0x00000001) == 0x00000001) &&
7185               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
7186             result_ =
7187               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
7188           } else {
7189             result_ = value;
7190           }
7191           onChanged();
7192         } else {
7193           resultBuilder_.mergeFrom(value);
7194         }
7195         bitField0_ |= 0x00000001;
7196         return this;
7197       }
7198       /**
7199        * <code>optional .Result result = 1;</code>
7200        */
7201       public Builder clearResult() {
7202         if (resultBuilder_ == null) {
7203           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
7204           onChanged();
7205         } else {
7206           resultBuilder_.clear();
7207         }
7208         bitField0_ = (bitField0_ & ~0x00000001);
7209         return this;
7210       }
7211       /**
7212        * <code>optional .Result result = 1;</code>
7213        */
7214       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
7215         bitField0_ |= 0x00000001;
7216         onChanged();
7217         return getResultFieldBuilder().getBuilder();
7218       }
7219       /**
7220        * <code>optional .Result result = 1;</code>
7221        */
7222       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
7223         if (resultBuilder_ != null) {
7224           return resultBuilder_.getMessageOrBuilder();
7225         } else {
7226           return result_;
7227         }
7228       }
7229       /**
7230        * <code>optional .Result result = 1;</code>
7231        */
7232       private com.google.protobuf.SingleFieldBuilder<
7233           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
7234           getResultFieldBuilder() {
7235         if (resultBuilder_ == null) {
7236           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
7237               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
7238                   result_,
7239                   getParentForChildren(),
7240                   isClean());
7241           result_ = null;
7242         }
7243         return resultBuilder_;
7244       }
7245 
7246       // @@protoc_insertion_point(builder_scope:GetResponse)
7247     }
7248 
7249     static {
7250       defaultInstance = new GetResponse(true);
7251       defaultInstance.initFields();
7252     }
7253 
7254     // @@protoc_insertion_point(class_scope:GetResponse)
7255   }
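  /*
   * Editorial note: an illustrative usage sketch, not part of the generated file.
   * A GetResponse can be round-tripped through its wire form with the parseFrom
   * overloads above; the bytes below stand in for data returned by a region server.
   *
   *   ClientProtos.GetResponse response = ClientProtos.GetResponse.newBuilder()
   *       .setResult(ClientProtos.Result.getDefaultInstance())
   *       .build();
   *   byte[] wire = response.toByteArray(); // serialization inherited from GeneratedMessage
   *   ClientProtos.GetResponse parsed = ClientProtos.GetResponse.parseFrom(wire);
   *   if (parsed.hasResult()) {             // result is optional, so check presence first
   *     ClientProtos.Result result = parsed.getResult();
   *   }
   */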
7256 
7257   public interface ConditionOrBuilder
7258       extends com.google.protobuf.MessageOrBuilder {
7259 
7260     // required bytes row = 1;
7261     /**
7262      * <code>required bytes row = 1;</code>
7263      */
7264     boolean hasRow();
7265     /**
7266      * <code>required bytes row = 1;</code>
7267      */
7268     com.google.protobuf.ByteString getRow();
7269 
7270     // required bytes family = 2;
7271     /**
7272      * <code>required bytes family = 2;</code>
7273      */
7274     boolean hasFamily();
7275     /**
7276      * <code>required bytes family = 2;</code>
7277      */
7278     com.google.protobuf.ByteString getFamily();
7279 
7280     // required bytes qualifier = 3;
7281     /**
7282      * <code>required bytes qualifier = 3;</code>
7283      */
7284     boolean hasQualifier();
7285     /**
7286      * <code>required bytes qualifier = 3;</code>
7287      */
7288     com.google.protobuf.ByteString getQualifier();
7289 
7290     // required .CompareType compare_type = 4;
7291     /**
7292      * <code>required .CompareType compare_type = 4;</code>
7293      */
7294     boolean hasCompareType();
7295     /**
7296      * <code>required .CompareType compare_type = 4;</code>
7297      */
7298     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType();
7299 
7300     // required .Comparator comparator = 5;
7301     /**
7302      * <code>required .Comparator comparator = 5;</code>
7303      */
7304     boolean hasComparator();
7305     /**
7306      * <code>required .Comparator comparator = 5;</code>
7307      */
7308     org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator();
7309     /**
7310      * <code>required .Comparator comparator = 5;</code>
7311      */
7312     org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder();
7313   }
7314   /**
7315    * Protobuf type {@code Condition}
7316    *
7317    * <pre>
7318    **
7319    * Condition to check if the value of a given cell (row,
7320    * family, qualifier) matches a value via a given comparator.
7321    *
7322    * Condition is used in check and mutate operations.
7323    * </pre>
7324    */
7325   public static final class Condition extends
7326       com.google.protobuf.GeneratedMessage
7327       implements ConditionOrBuilder {
7328     // Use Condition.newBuilder() to construct.
7329     private Condition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7330       super(builder);
7331       this.unknownFields = builder.getUnknownFields();
7332     }
7333     private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7334 
7335     private static final Condition defaultInstance;
7336     public static Condition getDefaultInstance() {
7337       return defaultInstance;
7338     }
7339 
7340     public Condition getDefaultInstanceForType() {
7341       return defaultInstance;
7342     }
7343 
7344     private final com.google.protobuf.UnknownFieldSet unknownFields;
7345     @java.lang.Override
7346     public final com.google.protobuf.UnknownFieldSet
7347         getUnknownFields() {
7348       return this.unknownFields;
7349     }
7350     private Condition(
7351         com.google.protobuf.CodedInputStream input,
7352         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7353         throws com.google.protobuf.InvalidProtocolBufferException {
7354       initFields();
7355       int mutable_bitField0_ = 0;
7356       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7357           com.google.protobuf.UnknownFieldSet.newBuilder();
7358       try {
7359         boolean done = false;
7360         while (!done) {
7361           int tag = input.readTag();
7362           switch (tag) {
7363             case 0:
7364               done = true;
7365               break;
7366             default: {
7367               if (!parseUnknownField(input, unknownFields,
7368                                      extensionRegistry, tag)) {
7369                 done = true;
7370               }
7371               break;
7372             }
7373             case 10: {
7374               bitField0_ |= 0x00000001;
7375               row_ = input.readBytes();
7376               break;
7377             }
7378             case 18: {
7379               bitField0_ |= 0x00000002;
7380               family_ = input.readBytes();
7381               break;
7382             }
7383             case 26: {
7384               bitField0_ |= 0x00000004;
7385               qualifier_ = input.readBytes();
7386               break;
7387             }
7388             case 32: {
7389               int rawValue = input.readEnum();
7390               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue);
7391               if (value == null) {
7392                 unknownFields.mergeVarintField(4, rawValue);
7393               } else {
7394                 bitField0_ |= 0x00000008;
7395                 compareType_ = value;
7396               }
7397               break;
7398             }
7399             case 42: {
7400               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null;
7401               if (((bitField0_ & 0x00000010) == 0x00000010)) {
7402                 subBuilder = comparator_.toBuilder();
7403               }
7404               comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry);
7405               if (subBuilder != null) {
7406                 subBuilder.mergeFrom(comparator_);
7407                 comparator_ = subBuilder.buildPartial();
7408               }
7409               bitField0_ |= 0x00000010;
7410               break;
7411             }
7412           }
7413         }
7414       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7415         throw e.setUnfinishedMessage(this);
7416       } catch (java.io.IOException e) {
7417         throw new com.google.protobuf.InvalidProtocolBufferException(
7418             e.getMessage()).setUnfinishedMessage(this);
7419       } finally {
7420         this.unknownFields = unknownFields.build();
7421         makeExtensionsImmutable();
7422       }
7423     }
7424     public static final com.google.protobuf.Descriptors.Descriptor
7425         getDescriptor() {
7426       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor;
7427     }
7428 
7429     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7430         internalGetFieldAccessorTable() {
7431       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable
7432           .ensureFieldAccessorsInitialized(
7433               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
7434     }
7435 
7436     public static com.google.protobuf.Parser<Condition> PARSER =
7437         new com.google.protobuf.AbstractParser<Condition>() {
7438       public Condition parsePartialFrom(
7439           com.google.protobuf.CodedInputStream input,
7440           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7441           throws com.google.protobuf.InvalidProtocolBufferException {
7442         return new Condition(input, extensionRegistry);
7443       }
7444     };
7445 
7446     @java.lang.Override
7447     public com.google.protobuf.Parser<Condition> getParserForType() {
7448       return PARSER;
7449     }
7450 
7451     private int bitField0_;
7452     // required bytes row = 1;
7453     public static final int ROW_FIELD_NUMBER = 1;
7454     private com.google.protobuf.ByteString row_;
7455     /**
7456      * <code>required bytes row = 1;</code>
7457      */
7458     public boolean hasRow() {
7459       return ((bitField0_ & 0x00000001) == 0x00000001);
7460     }
7461     /**
7462      * <code>required bytes row = 1;</code>
7463      */
7464     public com.google.protobuf.ByteString getRow() {
7465       return row_;
7466     }
7467 
7468     // required bytes family = 2;
7469     public static final int FAMILY_FIELD_NUMBER = 2;
7470     private com.google.protobuf.ByteString family_;
7471     /**
7472      * <code>required bytes family = 2;</code>
7473      */
7474     public boolean hasFamily() {
7475       return ((bitField0_ & 0x00000002) == 0x00000002);
7476     }
7477     /**
7478      * <code>required bytes family = 2;</code>
7479      */
7480     public com.google.protobuf.ByteString getFamily() {
7481       return family_;
7482     }
7483 
7484     // required bytes qualifier = 3;
7485     public static final int QUALIFIER_FIELD_NUMBER = 3;
7486     private com.google.protobuf.ByteString qualifier_;
7487     /**
7488      * <code>required bytes qualifier = 3;</code>
7489      */
7490     public boolean hasQualifier() {
7491       return ((bitField0_ & 0x00000004) == 0x00000004);
7492     }
7493     /**
7494      * <code>required bytes qualifier = 3;</code>
7495      */
7496     public com.google.protobuf.ByteString getQualifier() {
7497       return qualifier_;
7498     }
7499 
7500     // required .CompareType compare_type = 4;
7501     public static final int COMPARE_TYPE_FIELD_NUMBER = 4;
7502     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_;
7503     /**
7504      * <code>required .CompareType compare_type = 4;</code>
7505      */
7506     public boolean hasCompareType() {
7507       return ((bitField0_ & 0x00000008) == 0x00000008);
7508     }
7509     /**
7510      * <code>required .CompareType compare_type = 4;</code>
7511      */
7512     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
7513       return compareType_;
7514     }
7515 
7516     // required .Comparator comparator = 5;
7517     public static final int COMPARATOR_FIELD_NUMBER = 5;
7518     private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_;
7519     /**
7520      * <code>required .Comparator comparator = 5;</code>
7521      */
7522     public boolean hasComparator() {
7523       return ((bitField0_ & 0x00000010) == 0x00000010);
7524     }
7525     /**
7526      * <code>required .Comparator comparator = 5;</code>
7527      */
7528     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
7529       return comparator_;
7530     }
7531     /**
7532      * <code>required .Comparator comparator = 5;</code>
7533      */
7534     public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
7535       return comparator_;
7536     }
7537 
7538     private void initFields() {
7539       row_ = com.google.protobuf.ByteString.EMPTY;
7540       family_ = com.google.protobuf.ByteString.EMPTY;
7541       qualifier_ = com.google.protobuf.ByteString.EMPTY;
7542       compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
7543       comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
7544     }
7545     private byte memoizedIsInitialized = -1;
7546     public final boolean isInitialized() {
7547       byte isInitialized = memoizedIsInitialized;
7548       if (isInitialized != -1) return isInitialized == 1;
7549 
7550       if (!hasRow()) {
7551         memoizedIsInitialized = 0;
7552         return false;
7553       }
7554       if (!hasFamily()) {
7555         memoizedIsInitialized = 0;
7556         return false;
7557       }
7558       if (!hasQualifier()) {
7559         memoizedIsInitialized = 0;
7560         return false;
7561       }
7562       if (!hasCompareType()) {
7563         memoizedIsInitialized = 0;
7564         return false;
7565       }
7566       if (!hasComparator()) {
7567         memoizedIsInitialized = 0;
7568         return false;
7569       }
7570       if (!getComparator().isInitialized()) {
7571         memoizedIsInitialized = 0;
7572         return false;
7573       }
7574       memoizedIsInitialized = 1;
7575       return true;
7576     }
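    /*
     * Editorial note: an illustrative usage sketch, not part of the generated file.
     * All five Condition fields are required, so isInitialized() above succeeds only
     * once row, family, qualifier, compare_type and comparator are populated. The
     * setter names follow the standard protobuf builder pattern; the EQUAL enum
     * constant and the Comparator name field are assumptions from the companion protos.
     *
     *   ClientProtos.Condition condition = ClientProtos.Condition.newBuilder()
     *       .setRow(ByteString.copyFromUtf8("exampleRow"))
     *       .setFamily(ByteString.copyFromUtf8("exampleFamily"))
     *       .setQualifier(ByteString.copyFromUtf8("exampleQualifier"))
     *       .setCompareType(HBaseProtos.CompareType.EQUAL)
     *       .setComparator(ComparatorProtos.Comparator.newBuilder()
     *           .setName("exampleComparatorClassName"))
     *       .build(); // throws if any required field is missing or the comparator is incomplete
     */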
7577 
7578     public void writeTo(com.google.protobuf.CodedOutputStream output)
7579                         throws java.io.IOException {
7580       getSerializedSize();
7581       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7582         output.writeBytes(1, row_);
7583       }
7584       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7585         output.writeBytes(2, family_);
7586       }
7587       if (((bitField0_ & 0x00000004) == 0x00000004)) {
7588         output.writeBytes(3, qualifier_);
7589       }
7590       if (((bitField0_ & 0x00000008) == 0x00000008)) {
7591         output.writeEnum(4, compareType_.getNumber());
7592       }
7593       if (((bitField0_ & 0x00000010) == 0x00000010)) {
7594         output.writeMessage(5, comparator_);
7595       }
7596       getUnknownFields().writeTo(output);
7597     }
7598 
7599     private int memoizedSerializedSize = -1;
7600     public int getSerializedSize() {
7601       int size = memoizedSerializedSize;
7602       if (size != -1) return size;
7603 
7604       size = 0;
7605       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7606         size += com.google.protobuf.CodedOutputStream
7607           .computeBytesSize(1, row_);
7608       }
7609       if (((bitField0_ & 0x00000002) == 0x00000002)) {
7610         size += com.google.protobuf.CodedOutputStream
7611           .computeBytesSize(2, family_);
7612       }
7613       if (((bitField0_ & 0x00000004) == 0x00000004)) {
7614         size += com.google.protobuf.CodedOutputStream
7615           .computeBytesSize(3, qualifier_);
7616       }
7617       if (((bitField0_ & 0x00000008) == 0x00000008)) {
7618         size += com.google.protobuf.CodedOutputStream
7619           .computeEnumSize(4, compareType_.getNumber());
7620       }
7621       if (((bitField0_ & 0x00000010) == 0x00000010)) {
7622         size += com.google.protobuf.CodedOutputStream
7623           .computeMessageSize(5, comparator_);
7624       }
7625       size += getUnknownFields().getSerializedSize();
7626       memoizedSerializedSize = size;
7627       return size;
7628     }
7629 
7630     private static final long serialVersionUID = 0L;
7631     @java.lang.Override
7632     protected java.lang.Object writeReplace()
7633         throws java.io.ObjectStreamException {
7634       return super.writeReplace();
7635     }
7636 
7637     @java.lang.Override
7638     public boolean equals(final java.lang.Object obj) {
7639       if (obj == this) {
7640        return true;
7641       }
7642       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) {
7643         return super.equals(obj);
7644       }
7645       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj;
7646 
7647       boolean result = true;
7648       result = result && (hasRow() == other.hasRow());
7649       if (hasRow()) {
7650         result = result && getRow()
7651             .equals(other.getRow());
7652       }
7653       result = result && (hasFamily() == other.hasFamily());
7654       if (hasFamily()) {
7655         result = result && getFamily()
7656             .equals(other.getFamily());
7657       }
7658       result = result && (hasQualifier() == other.hasQualifier());
7659       if (hasQualifier()) {
7660         result = result && getQualifier()
7661             .equals(other.getQualifier());
7662       }
7663       result = result && (hasCompareType() == other.hasCompareType());
7664       if (hasCompareType()) {
7665         result = result &&
7666             (getCompareType() == other.getCompareType());
7667       }
7668       result = result && (hasComparator() == other.hasComparator());
7669       if (hasComparator()) {
7670         result = result && getComparator()
7671             .equals(other.getComparator());
7672       }
7673       result = result &&
7674           getUnknownFields().equals(other.getUnknownFields());
7675       return result;
7676     }
7677 
7678     private int memoizedHashCode = 0;
7679     @java.lang.Override
7680     public int hashCode() {
7681       if (memoizedHashCode != 0) {
7682         return memoizedHashCode;
7683       }
7684       int hash = 41;
7685       hash = (19 * hash) + getDescriptorForType().hashCode();
7686       if (hasRow()) {
7687         hash = (37 * hash) + ROW_FIELD_NUMBER;
7688         hash = (53 * hash) + getRow().hashCode();
7689       }
7690       if (hasFamily()) {
7691         hash = (37 * hash) + FAMILY_FIELD_NUMBER;
7692         hash = (53 * hash) + getFamily().hashCode();
7693       }
7694       if (hasQualifier()) {
7695         hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
7696         hash = (53 * hash) + getQualifier().hashCode();
7697       }
7698       if (hasCompareType()) {
7699         hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER;
7700         hash = (53 * hash) + hashEnum(getCompareType());
7701       }
7702       if (hasComparator()) {
7703         hash = (37 * hash) + COMPARATOR_FIELD_NUMBER;
7704         hash = (53 * hash) + getComparator().hashCode();
7705       }
7706       hash = (29 * hash) + getUnknownFields().hashCode();
7707       memoizedHashCode = hash;
7708       return hash;
7709     }
7710 
7711     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7712         com.google.protobuf.ByteString data)
7713         throws com.google.protobuf.InvalidProtocolBufferException {
7714       return PARSER.parseFrom(data);
7715     }
7716     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7717         com.google.protobuf.ByteString data,
7718         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7719         throws com.google.protobuf.InvalidProtocolBufferException {
7720       return PARSER.parseFrom(data, extensionRegistry);
7721     }
7722     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data)
7723         throws com.google.protobuf.InvalidProtocolBufferException {
7724       return PARSER.parseFrom(data);
7725     }
7726     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7727         byte[] data,
7728         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7729         throws com.google.protobuf.InvalidProtocolBufferException {
7730       return PARSER.parseFrom(data, extensionRegistry);
7731     }
7732     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input)
7733         throws java.io.IOException {
7734       return PARSER.parseFrom(input);
7735     }
7736     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7737         java.io.InputStream input,
7738         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7739         throws java.io.IOException {
7740       return PARSER.parseFrom(input, extensionRegistry);
7741     }
7742     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input)
7743         throws java.io.IOException {
7744       return PARSER.parseDelimitedFrom(input);
7745     }
7746     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(
7747         java.io.InputStream input,
7748         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7749         throws java.io.IOException {
7750       return PARSER.parseDelimitedFrom(input, extensionRegistry);
7751     }
7752     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7753         com.google.protobuf.CodedInputStream input)
7754         throws java.io.IOException {
7755       return PARSER.parseFrom(input);
7756     }
7757     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(
7758         com.google.protobuf.CodedInputStream input,
7759         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7760         throws java.io.IOException {
7761       return PARSER.parseFrom(input, extensionRegistry);
7762     }
7763 
7764     public static Builder newBuilder() { return Builder.create(); }
7765     public Builder newBuilderForType() { return newBuilder(); }
7766     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) {
7767       return newBuilder().mergeFrom(prototype);
7768     }
7769     public Builder toBuilder() { return newBuilder(this); }
7770 
7771     @java.lang.Override
7772     protected Builder newBuilderForType(
7773         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7774       Builder builder = new Builder(parent);
7775       return builder;
7776     }
7777     /**
7778      * Protobuf type {@code Condition}
7779      *
7780      * <pre>
7781      **
7782      * Condition to check if the value of a given cell (row,
7783      * family, qualifier) matches a value via a given comparator.
7784      *
7785      * Condition is used in check and mutate operations.
7786      * </pre>
7787      */
7788     public static final class Builder extends
7789         com.google.protobuf.GeneratedMessage.Builder<Builder>
7790        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder {
7791       public static final com.google.protobuf.Descriptors.Descriptor
7792           getDescriptor() {
7793         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor;
7794       }
7795 
7796       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
7797           internalGetFieldAccessorTable() {
7798         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable
7799             .ensureFieldAccessorsInitialized(
7800                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class);
7801       }
7802 
7803       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder()
7804       private Builder() {
7805         maybeForceBuilderInitialization();
7806       }
7807 
7808       private Builder(
7809           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7810         super(parent);
7811         maybeForceBuilderInitialization();
7812       }
7813       private void maybeForceBuilderInitialization() {
7814         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7815           getComparatorFieldBuilder();
7816         }
7817       }
7818       private static Builder create() {
7819         return new Builder();
7820       }
7821 
7822       public Builder clear() {
7823         super.clear();
7824         row_ = com.google.protobuf.ByteString.EMPTY;
7825         bitField0_ = (bitField0_ & ~0x00000001);
7826         family_ = com.google.protobuf.ByteString.EMPTY;
7827         bitField0_ = (bitField0_ & ~0x00000002);
7828         qualifier_ = com.google.protobuf.ByteString.EMPTY;
7829         bitField0_ = (bitField0_ & ~0x00000004);
7830         compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
7831         bitField0_ = (bitField0_ & ~0x00000008);
7832         if (comparatorBuilder_ == null) {
7833           comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
7834         } else {
7835           comparatorBuilder_.clear();
7836         }
7837         bitField0_ = (bitField0_ & ~0x00000010);
7838         return this;
7839       }
7840 
7841       public Builder clone() {
7842         return create().mergeFrom(buildPartial());
7843       }
7844 
7845       public com.google.protobuf.Descriptors.Descriptor
7846           getDescriptorForType() {
7847         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor;
7848       }
7849 
7850       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() {
7851         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
7852       }
7853 
7854       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() {
7855         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial();
7856         if (!result.isInitialized()) {
7857           throw newUninitializedMessageException(result);
7858         }
7859         return result;
7860       }
7861 
7862       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition buildPartial() {
7863         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this);
7864         int from_bitField0_ = bitField0_;
7865         int to_bitField0_ = 0;
7866         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
7867           to_bitField0_ |= 0x00000001;
7868         }
7869         result.row_ = row_;
7870         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
7871           to_bitField0_ |= 0x00000002;
7872         }
7873         result.family_ = family_;
7874         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
7875           to_bitField0_ |= 0x00000004;
7876         }
7877         result.qualifier_ = qualifier_;
7878         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
7879           to_bitField0_ |= 0x00000008;
7880         }
7881         result.compareType_ = compareType_;
7882         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
7883           to_bitField0_ |= 0x00000010;
7884         }
7885         if (comparatorBuilder_ == null) {
7886           result.comparator_ = comparator_;
7887         } else {
7888           result.comparator_ = comparatorBuilder_.build();
7889         }
7890         result.bitField0_ = to_bitField0_;
7891         onBuilt();
7892         return result;
7893       }
7894 
7895       public Builder mergeFrom(com.google.protobuf.Message other) {
7896         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) {
7897           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other);
7898         } else {
7899           super.mergeFrom(other);
7900           return this;
7901         }
7902       }
7903 
7904       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) {
7905         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this;
7906         if (other.hasRow()) {
7907           setRow(other.getRow());
7908         }
7909         if (other.hasFamily()) {
7910           setFamily(other.getFamily());
7911         }
7912         if (other.hasQualifier()) {
7913           setQualifier(other.getQualifier());
7914         }
7915         if (other.hasCompareType()) {
7916           setCompareType(other.getCompareType());
7917         }
7918         if (other.hasComparator()) {
7919           mergeComparator(other.getComparator());
7920         }
7921         this.mergeUnknownFields(other.getUnknownFields());
7922         return this;
7923       }
7924 
7925       public final boolean isInitialized() {
7926         if (!hasRow()) {
7927 
7928           return false;
7929         }
7930         if (!hasFamily()) {
7931 
7932           return false;
7933         }
7934         if (!hasQualifier()) {
7935 
7936           return false;
7937         }
7938         if (!hasCompareType()) {
7939 
7940           return false;
7941         }
7942         if (!hasComparator()) {
7943 
7944           return false;
7945         }
7946         if (!getComparator().isInitialized()) {
7947 
7948           return false;
7949         }
7950         return true;
7951       }
7952 
7953       public Builder mergeFrom(
7954           com.google.protobuf.CodedInputStream input,
7955           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7956           throws java.io.IOException {
7957         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parsedMessage = null;
7958         try {
7959           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7960         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7961           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage();
7962           throw e;
7963         } finally {
7964           if (parsedMessage != null) {
7965             mergeFrom(parsedMessage);
7966           }
7967         }
7968         return this;
7969       }
7970       private int bitField0_;
7971 
7972       // required bytes row = 1;
7973       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
7974       /**
7975        * <code>required bytes row = 1;</code>
7976        */
7977       public boolean hasRow() {
7978         return ((bitField0_ & 0x00000001) == 0x00000001);
7979       }
7980       /**
7981        * <code>required bytes row = 1;</code>
7982        */
7983       public com.google.protobuf.ByteString getRow() {
7984         return row_;
7985       }
7986       /**
7987        * <code>required bytes row = 1;</code>
7988        */
7989       public Builder setRow(com.google.protobuf.ByteString value) {
7990         if (value == null) {
7991           throw new NullPointerException();
7992         }
7993         bitField0_ |= 0x00000001;
7994         row_ = value;
7995         onChanged();
7996         return this;
7997       }
7998       /**
7999        * <code>required bytes row = 1;</code>
8000        */
8001       public Builder clearRow() {
8002         bitField0_ = (bitField0_ & ~0x00000001);
8003         row_ = getDefaultInstance().getRow();
8004         onChanged();
8005         return this;
8006       }
8007 
8008       // required bytes family = 2;
8009       private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
8010       /**
8011        * <code>required bytes family = 2;</code>
8012        */
8013       public boolean hasFamily() {
8014         return ((bitField0_ & 0x00000002) == 0x00000002);
8015       }
8016       /**
8017        * <code>required bytes family = 2;</code>
8018        */
8019       public com.google.protobuf.ByteString getFamily() {
8020         return family_;
8021       }
8022       /**
8023        * <code>required bytes family = 2;</code>
8024        */
8025       public Builder setFamily(com.google.protobuf.ByteString value) {
8026         if (value == null) {
8027           throw new NullPointerException();
8028         }
8029         bitField0_ |= 0x00000002;
8030         family_ = value;
8031         onChanged();
8032         return this;
8033       }
8034       /**
8035        * <code>required bytes family = 2;</code>
8036        */
8037       public Builder clearFamily() {
8038         bitField0_ = (bitField0_ & ~0x00000002);
8039         family_ = getDefaultInstance().getFamily();
8040         onChanged();
8041         return this;
8042       }
8043 
8044       // required bytes qualifier = 3;
8045       private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
8046       /**
8047        * <code>required bytes qualifier = 3;</code>
8048        */
8049       public boolean hasQualifier() {
8050         return ((bitField0_ & 0x00000004) == 0x00000004);
8051       }
8052       /**
8053        * <code>required bytes qualifier = 3;</code>
8054        */
8055       public com.google.protobuf.ByteString getQualifier() {
8056         return qualifier_;
8057       }
8058       /**
8059        * <code>required bytes qualifier = 3;</code>
8060        */
8061       public Builder setQualifier(com.google.protobuf.ByteString value) {
8062         if (value == null) {
8063           throw new NullPointerException();
8064         }
8065         bitField0_ |= 0x00000004;
8066         qualifier_ = value;
8067         onChanged();
8068         return this;
8069       }
8070       /**
8071        * <code>required bytes qualifier = 3;</code>
8072        */
8073       public Builder clearQualifier() {
8074         bitField0_ = (bitField0_ & ~0x00000004);
8075         qualifier_ = getDefaultInstance().getQualifier();
8076         onChanged();
8077         return this;
8078       }
8079 
8080       // required .CompareType compare_type = 4;
8081       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
8082       /**
8083        * <code>required .CompareType compare_type = 4;</code>
8084        */
8085       public boolean hasCompareType() {
8086         return ((bitField0_ & 0x00000008) == 0x00000008);
8087       }
8088       /**
8089        * <code>required .CompareType compare_type = 4;</code>
8090        */
8091       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() {
8092         return compareType_;
8093       }
8094       /**
8095        * <code>required .CompareType compare_type = 4;</code>
8096        */
8097       public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) {
8098         if (value == null) {
8099           throw new NullPointerException();
8100         }
8101         bitField0_ |= 0x00000008;
8102         compareType_ = value;
8103         onChanged();
8104         return this;
8105       }
8106       /**
8107        * <code>required .CompareType compare_type = 4;</code>
8108        */
8109       public Builder clearCompareType() {
8110         bitField0_ = (bitField0_ & ~0x00000008);
8111         compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS;
8112         onChanged();
8113         return this;
8114       }
8115 
8116       // required .Comparator comparator = 5;
8117       private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
8118       private com.google.protobuf.SingleFieldBuilder<
8119           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_;
8120       /**
8121        * <code>required .Comparator comparator = 5;</code>
8122        */
8123       public boolean hasComparator() {
8124         return ((bitField0_ & 0x00000010) == 0x00000010);
8125       }
8126       /**
8127        * <code>required .Comparator comparator = 5;</code>
8128        */
8129       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() {
8130         if (comparatorBuilder_ == null) {
8131           return comparator_;
8132         } else {
8133           return comparatorBuilder_.getMessage();
8134         }
8135       }
8136       /**
8137        * <code>required .Comparator comparator = 5;</code>
8138        */
8139       public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
8140         if (comparatorBuilder_ == null) {
8141           if (value == null) {
8142             throw new NullPointerException();
8143           }
8144           comparator_ = value;
8145           onChanged();
8146         } else {
8147           comparatorBuilder_.setMessage(value);
8148         }
8149         bitField0_ |= 0x00000010;
8150         return this;
8151       }
8152       /**
8153        * <code>required .Comparator comparator = 5;</code>
8154        */
8155       public Builder setComparator(
8156           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) {
8157         if (comparatorBuilder_ == null) {
8158           comparator_ = builderForValue.build();
8159           onChanged();
8160         } else {
8161           comparatorBuilder_.setMessage(builderForValue.build());
8162         }
8163         bitField0_ |= 0x00000010;
8164         return this;
8165       }
8166       /**
8167        * <code>required .Comparator comparator = 5;</code>
8168        */
8169       public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) {
8170         if (comparatorBuilder_ == null) {
8171           if (((bitField0_ & 0x00000010) == 0x00000010) &&
8172               comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) {
8173             comparator_ =
8174               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial();
8175           } else {
8176             comparator_ = value;
8177           }
8178           onChanged();
8179         } else {
8180           comparatorBuilder_.mergeFrom(value);
8181         }
8182         bitField0_ |= 0x00000010;
8183         return this;
8184       }
8185       /**
8186        * <code>required .Comparator comparator = 5;</code>
8187        */
8188       public Builder clearComparator() {
8189         if (comparatorBuilder_ == null) {
8190           comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance();
8191           onChanged();
8192         } else {
8193           comparatorBuilder_.clear();
8194         }
8195         bitField0_ = (bitField0_ & ~0x00000010);
8196         return this;
8197       }
8198       /**
8199        * <code>required .Comparator comparator = 5;</code>
8200        */
8201       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() {
8202         bitField0_ |= 0x00000010;
8203         onChanged();
8204         return getComparatorFieldBuilder().getBuilder();
8205       }
8206       /**
8207        * <code>required .Comparator comparator = 5;</code>
8208        */
8209       public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() {
8210         if (comparatorBuilder_ != null) {
8211           return comparatorBuilder_.getMessageOrBuilder();
8212         } else {
8213           return comparator_;
8214         }
8215       }
8216       /**
8217        * <code>required .Comparator comparator = 5;</code>
8218        */
8219       private com.google.protobuf.SingleFieldBuilder<
8220           org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>
8221           getComparatorFieldBuilder() {
8222         if (comparatorBuilder_ == null) {
8223           comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
8224               org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>(
8225                   comparator_,
8226                   getParentForChildren(),
8227                   isClean());
8228           comparator_ = null;
8229         }
8230         return comparatorBuilder_;
8231       }
8232 
8233       // @@protoc_insertion_point(builder_scope:Condition)
8234     }
8235 
8236     static {
8237       defaultInstance = new Condition(true);
8238       defaultInstance.initFields();
8239     }
8240 
8241     // @@protoc_insertion_point(class_scope:Condition)
8242   }
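
  // Editor's note: the following helper is an illustrative sketch, not part of the
  // generated protocol buffer code. It shows how the Condition message above is
  // typically assembled through its Builder for a check-and-mutate request. The row,
  // family and qualifier literals are hypothetical; CompareType.LESS and
  // Comparator.getDefaultInstance() are used only because they already appear in this
  // file, and a real request would supply a concrete, fully populated comparator.
  private static Condition buildExampleCondition() {
    return Condition.newBuilder()
        .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
        .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
        .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("col"))
        .setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS)
        .setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance())
        // buildPartial() rather than build(), so the sketch does not depend on the
        // Comparator message's own required fields being populated.
        .buildPartial();
  }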
8243 
8244   public interface MutationProtoOrBuilder
8245       extends com.google.protobuf.MessageOrBuilder {
8246 
8247     // optional bytes row = 1;
8248     /**
8249      * <code>optional bytes row = 1;</code>
8250      */
8251     boolean hasRow();
8252     /**
8253      * <code>optional bytes row = 1;</code>
8254      */
8255     com.google.protobuf.ByteString getRow();
8256 
8257     // optional .MutationProto.MutationType mutate_type = 2;
8258     /**
8259      * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
8260      */
8261     boolean hasMutateType();
8262     /**
8263      * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
8264      */
8265     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType();
8266 
8267     // repeated .MutationProto.ColumnValue column_value = 3;
8268     /**
8269      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
8270      */
8271     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>
8272         getColumnValueList();
8273     /**
8274      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
8275      */
8276     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index);
8277     /**
8278      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
8279      */
8280     int getColumnValueCount();
8281     /**
8282      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
8283      */
8284     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
8285         getColumnValueOrBuilderList();
8286     /**
8287      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
8288      */
8289     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
8290         int index);
8291 
8292     // optional uint64 timestamp = 4;
8293     /**
8294      * <code>optional uint64 timestamp = 4;</code>
8295      */
8296     boolean hasTimestamp();
8297     /**
8298      * <code>optional uint64 timestamp = 4;</code>
8299      */
8300     long getTimestamp();
8301 
8302     // repeated .NameBytesPair attribute = 5;
8303     /**
8304      * <code>repeated .NameBytesPair attribute = 5;</code>
8305      */
8306     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
8307         getAttributeList();
8308     /**
8309      * <code>repeated .NameBytesPair attribute = 5;</code>
8310      */
8311     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
8312     /**
8313      * <code>repeated .NameBytesPair attribute = 5;</code>
8314      */
8315     int getAttributeCount();
8316     /**
8317      * <code>repeated .NameBytesPair attribute = 5;</code>
8318      */
8319     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
8320         getAttributeOrBuilderList();
8321     /**
8322      * <code>repeated .NameBytesPair attribute = 5;</code>
8323      */
8324     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
8325         int index);
8326 
8327     // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];
8328     /**
8329      * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
8330      */
8331     boolean hasDurability();
8332     /**
8333      * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
8334      */
8335     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability();
8336 
8337     // optional .TimeRange time_range = 7;
8338     /**
8339      * <code>optional .TimeRange time_range = 7;</code>
8340      *
8341      * <pre>
8342      * For some mutations, a result may be returned, in which case,
8343      * time range can be specified for potential performance gain
8344      * </pre>
8345      */
8346     boolean hasTimeRange();
8347     /**
8348      * <code>optional .TimeRange time_range = 7;</code>
8349      *
8350      * <pre>
8351      * For some mutations, a result may be returned, in which case,
8352      * time range can be specified for potential performance gain
8353      * </pre>
8354      */
8355     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
8356     /**
8357      * <code>optional .TimeRange time_range = 7;</code>
8358      *
8359      * <pre>
8360      * For some mutations, a result may be returned, in which case,
8361      * time range can be specified for potential performance gain
8362      * </pre>
8363      */
8364     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
8365 
8366     // optional int32 associated_cell_count = 8;
8367     /**
8368      * <code>optional int32 associated_cell_count = 8;</code>
8369      *
8370      * <pre>
8371      * The below count is set when the associated cells are NOT
8372      * part of this protobuf message; they are passed alongside
8373      * and then this Message is a placeholder with metadata.  The
8374      * count is needed to know how many to peel off the block of Cells as
8375      * ours.  NOTE: This is different from the pb managed cell_count of the
8376      * 'cell' field above which is non-null when the cells are pb'd.
8377      * </pre>
8378      */
8379     boolean hasAssociatedCellCount();
8380     /**
8381      * <code>optional int32 associated_cell_count = 8;</code>
8382      *
8383      * <pre>
8384      * The below count is set when the associated cells are NOT
8385      * part of this protobuf message; they are passed alongside
8386      * and then this Message is a placeholder with metadata.  The
8387      * count is needed to know how many to peel off the block of Cells as
8388      * ours.  NOTE: This is different from the pb managed cell_count of the
8389      * 'cell' field above which is non-null when the cells are pb'd.
8390      * </pre>
8391      */
8392     int getAssociatedCellCount();
8393 
8394     // optional uint64 nonce = 9;
8395     /**
8396      * <code>optional uint64 nonce = 9;</code>
8397      */
8398     boolean hasNonce();
8399     /**
8400      * <code>optional uint64 nonce = 9;</code>
8401      */
8402     long getNonce();
8403   }
8404   /**
8405    * Protobuf type {@code MutationProto}
8406    *
8407    * <pre>
8408    **
8409    * A specific mutation inside a mutate request.
8410    * It can be an append, increment, put or delete based
8411    * on the mutation type.  It can be fully filled in or
8412    * only metadata present because data is being carried
8413    * elsewhere outside of pb.
8414    * </pre>
8415    */
8416   public static final class MutationProto extends
8417       com.google.protobuf.GeneratedMessage
8418       implements MutationProtoOrBuilder {
8419     // Use MutationProto.newBuilder() to construct.
8420     private MutationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8421       super(builder);
8422       this.unknownFields = builder.getUnknownFields();
8423     }
8424     private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8425 
8426     private static final MutationProto defaultInstance;
8427     public static MutationProto getDefaultInstance() {
8428       return defaultInstance;
8429     }
8430 
8431     public MutationProto getDefaultInstanceForType() {
8432       return defaultInstance;
8433     }
8434 
8435     private final com.google.protobuf.UnknownFieldSet unknownFields;
8436     @java.lang.Override
8437     public final com.google.protobuf.UnknownFieldSet
8438         getUnknownFields() {
8439       return this.unknownFields;
8440     }
8441     private MutationProto(
8442         com.google.protobuf.CodedInputStream input,
8443         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8444         throws com.google.protobuf.InvalidProtocolBufferException {
8445       initFields();
8446       int mutable_bitField0_ = 0;
8447       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8448           com.google.protobuf.UnknownFieldSet.newBuilder();
8449       try {
8450         boolean done = false;
8451         while (!done) {
8452           int tag = input.readTag();
8453           switch (tag) {
8454             case 0:
8455               done = true;
8456               break;
8457             default: {
8458               if (!parseUnknownField(input, unknownFields,
8459                                      extensionRegistry, tag)) {
8460                 done = true;
8461               }
8462               break;
8463             }
8464             case 10: {
8465               bitField0_ |= 0x00000001;
8466               row_ = input.readBytes();
8467               break;
8468             }
8469             case 16: {
8470               int rawValue = input.readEnum();
8471               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue);
8472               if (value == null) {
8473                 unknownFields.mergeVarintField(2, rawValue);
8474               } else {
8475                 bitField0_ |= 0x00000002;
8476                 mutateType_ = value;
8477               }
8478               break;
8479             }
8480             case 26: {
8481               if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
8482                 columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>();
8483                 mutable_bitField0_ |= 0x00000004;
8484               }
8485               columnValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry));
8486               break;
8487             }
8488             case 32: {
8489               bitField0_ |= 0x00000004;
8490               timestamp_ = input.readUInt64();
8491               break;
8492             }
8493             case 42: {
8494               if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
8495                 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
8496                 mutable_bitField0_ |= 0x00000010;
8497               }
8498               attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
8499               break;
8500             }
8501             case 48: {
8502               int rawValue = input.readEnum();
8503               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(rawValue);
8504               if (value == null) {
8505                 unknownFields.mergeVarintField(6, rawValue);
8506               } else {
8507                 bitField0_ |= 0x00000008;
8508                 durability_ = value;
8509               }
8510               break;
8511             }
8512             case 58: {
8513               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
8514               if (((bitField0_ & 0x00000010) == 0x00000010)) {
8515                 subBuilder = timeRange_.toBuilder();
8516               }
8517               timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
8518               if (subBuilder != null) {
8519                 subBuilder.mergeFrom(timeRange_);
8520                 timeRange_ = subBuilder.buildPartial();
8521               }
8522               bitField0_ |= 0x00000010;
8523               break;
8524             }
8525             case 64: {
8526               bitField0_ |= 0x00000020;
8527               associatedCellCount_ = input.readInt32();
8528               break;
8529             }
8530             case 72: {
8531               bitField0_ |= 0x00000040;
8532               nonce_ = input.readUInt64();
8533               break;
8534             }
8535           }
8536         }
8537       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8538         throw e.setUnfinishedMessage(this);
8539       } catch (java.io.IOException e) {
8540         throw new com.google.protobuf.InvalidProtocolBufferException(
8541             e.getMessage()).setUnfinishedMessage(this);
8542       } finally {
8543         if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
8544           columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
8545         }
8546         if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
8547           attribute_ = java.util.Collections.unmodifiableList(attribute_);
8548         }
8549         this.unknownFields = unknownFields.build();
8550         makeExtensionsImmutable();
8551       }
8552     }
8553     public static final com.google.protobuf.Descriptors.Descriptor
8554         getDescriptor() {
8555       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor;
8556     }
8557 
8558     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
8559         internalGetFieldAccessorTable() {
8560       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable
8561           .ensureFieldAccessorsInitialized(
8562               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
8563     }
8564 
8565     public static com.google.protobuf.Parser<MutationProto> PARSER =
8566         new com.google.protobuf.AbstractParser<MutationProto>() {
8567       public MutationProto parsePartialFrom(
8568           com.google.protobuf.CodedInputStream input,
8569           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8570           throws com.google.protobuf.InvalidProtocolBufferException {
8571         return new MutationProto(input, extensionRegistry);
8572       }
8573     };
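
    // Editor's note: an illustrative sketch, not generated code. It decodes a
    // serialized MutationProto with the PARSER defined above; the byte array is
    // assumed to hold a MutationProto encoded elsewhere. Unrecognized enum numbers
    // on the wire are preserved in the unknown-field set by the parsing constructor
    // rather than causing a failure.
    private static MutationProto parseMutationExample(byte[] wireBytes)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(wireBytes);
    }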
8574 
8575     @java.lang.Override
8576     public com.google.protobuf.Parser<MutationProto> getParserForType() {
8577       return PARSER;
8578     }
8579 
8580     /**
8581      * Protobuf enum {@code MutationProto.Durability}
8582      */
8583     public enum Durability
8584         implements com.google.protobuf.ProtocolMessageEnum {
8585       /**
8586        * <code>USE_DEFAULT = 0;</code>
8587        */
8588       USE_DEFAULT(0, 0),
8589       /**
8590        * <code>SKIP_WAL = 1;</code>
8591        */
8592       SKIP_WAL(1, 1),
8593       /**
8594        * <code>ASYNC_WAL = 2;</code>
8595        */
8596       ASYNC_WAL(2, 2),
8597       /**
8598        * <code>SYNC_WAL = 3;</code>
8599        */
8600       SYNC_WAL(3, 3),
8601       /**
8602        * <code>FSYNC_WAL = 4;</code>
8603        */
8604       FSYNC_WAL(4, 4),
8605       ;
8606 
8607       /**
8608        * <code>USE_DEFAULT = 0;</code>
8609        */
8610       public static final int USE_DEFAULT_VALUE = 0;
8611       /**
8612        * <code>SKIP_WAL = 1;</code>
8613        */
8614       public static final int SKIP_WAL_VALUE = 1;
8615       /**
8616        * <code>ASYNC_WAL = 2;</code>
8617        */
8618       public static final int ASYNC_WAL_VALUE = 2;
8619       /**
8620        * <code>SYNC_WAL = 3;</code>
8621        */
8622       public static final int SYNC_WAL_VALUE = 3;
8623       /**
8624        * <code>FSYNC_WAL = 4;</code>
8625        */
8626       public static final int FSYNC_WAL_VALUE = 4;
8627 
8628 
8629       public final int getNumber() { return value; }
8630 
8631       public static Durability valueOf(int value) {
8632         switch (value) {
8633           case 0: return USE_DEFAULT;
8634           case 1: return SKIP_WAL;
8635           case 2: return ASYNC_WAL;
8636           case 3: return SYNC_WAL;
8637           case 4: return FSYNC_WAL;
8638           default: return null;
8639         }
8640       }
8641 
8642       public static com.google.protobuf.Internal.EnumLiteMap<Durability>
8643           internalGetValueMap() {
8644         return internalValueMap;
8645       }
8646       private static com.google.protobuf.Internal.EnumLiteMap<Durability>
8647           internalValueMap =
8648             new com.google.protobuf.Internal.EnumLiteMap<Durability>() {
8649               public Durability findValueByNumber(int number) {
8650                 return Durability.valueOf(number);
8651               }
8652             };
8653 
8654       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8655           getValueDescriptor() {
8656         return getDescriptor().getValues().get(index);
8657       }
8658       public final com.google.protobuf.Descriptors.EnumDescriptor
8659           getDescriptorForType() {
8660         return getDescriptor();
8661       }
8662       public static final com.google.protobuf.Descriptors.EnumDescriptor
8663           getDescriptor() {
8664         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0);
8665       }
8666 
8667       private static final Durability[] VALUES = values();
8668 
8669       public static Durability valueOf(
8670           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8671         if (desc.getType() != getDescriptor()) {
8672           throw new java.lang.IllegalArgumentException(
8673             "EnumValueDescriptor is not for this type.");
8674         }
8675         return VALUES[desc.getIndex()];
8676       }
8677 
8678       private final int index;
8679       private final int value;
8680 
8681       private Durability(int index, int value) {
8682         this.index = index;
8683         this.value = value;
8684       }
8685 
8686       // @@protoc_insertion_point(enum_scope:MutationProto.Durability)
8687     }
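
    // Editor's note: an illustrative sketch, not generated code. valueOf(int)
    // above returns null for numbers outside 0-4, mirroring how the parsing
    // constructor routes unknown durability values into the unknown-field set,
    // so a caller can fall back to the declared default (USE_DEFAULT).
    private static Durability durabilityOrDefault(int rawValue) {
      Durability parsed = Durability.valueOf(rawValue);
      return parsed != null ? parsed : Durability.USE_DEFAULT;
    }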
8688 
8689     /**
8690      * Protobuf enum {@code MutationProto.MutationType}
8691      */
8692     public enum MutationType
8693         implements com.google.protobuf.ProtocolMessageEnum {
8694       /**
8695        * <code>APPEND = 0;</code>
8696        */
8697       APPEND(0, 0),
8698       /**
8699        * <code>INCREMENT = 1;</code>
8700        */
8701       INCREMENT(1, 1),
8702       /**
8703        * <code>PUT = 2;</code>
8704        */
8705       PUT(2, 2),
8706       /**
8707        * <code>DELETE = 3;</code>
8708        */
8709       DELETE(3, 3),
8710       ;
8711 
8712       /**
8713        * <code>APPEND = 0;</code>
8714        */
8715       public static final int APPEND_VALUE = 0;
8716       /**
8717        * <code>INCREMENT = 1;</code>
8718        */
8719       public static final int INCREMENT_VALUE = 1;
8720       /**
8721        * <code>PUT = 2;</code>
8722        */
8723       public static final int PUT_VALUE = 2;
8724       /**
8725        * <code>DELETE = 3;</code>
8726        */
8727       public static final int DELETE_VALUE = 3;
8728 
8729 
8730       public final int getNumber() { return value; }
8731 
8732       public static MutationType valueOf(int value) {
8733         switch (value) {
8734           case 0: return APPEND;
8735           case 1: return INCREMENT;
8736           case 2: return PUT;
8737           case 3: return DELETE;
8738           default: return null;
8739         }
8740       }
8741 
8742       public static com.google.protobuf.Internal.EnumLiteMap<MutationType>
8743           internalGetValueMap() {
8744         return internalValueMap;
8745       }
8746       private static com.google.protobuf.Internal.EnumLiteMap<MutationType>
8747           internalValueMap =
8748             new com.google.protobuf.Internal.EnumLiteMap<MutationType>() {
8749               public MutationType findValueByNumber(int number) {
8750                 return MutationType.valueOf(number);
8751               }
8752             };
8753 
8754       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8755           getValueDescriptor() {
8756         return getDescriptor().getValues().get(index);
8757       }
8758       public final com.google.protobuf.Descriptors.EnumDescriptor
8759           getDescriptorForType() {
8760         return getDescriptor();
8761       }
8762       public static final com.google.protobuf.Descriptors.EnumDescriptor
8763           getDescriptor() {
8764         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1);
8765       }
8766 
8767       private static final MutationType[] VALUES = values();
8768 
8769       public static MutationType valueOf(
8770           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8771         if (desc.getType() != getDescriptor()) {
8772           throw new java.lang.IllegalArgumentException(
8773             "EnumValueDescriptor is not for this type.");
8774         }
8775         return VALUES[desc.getIndex()];
8776       }
8777 
8778       private final int index;
8779       private final int value;
8780 
8781       private MutationType(int index, int value) {
8782         this.index = index;
8783         this.value = value;
8784       }
8785 
8786       // @@protoc_insertion_point(enum_scope:MutationProto.MutationType)
8787     }
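
    // Editor's note: an illustrative sketch, not generated code. The label text is
    // hypothetical and only summarizes what the four MutationType values represent
    // when a MutationProto is applied.
    private static String describeMutationType(MutationType type) {
      switch (type) {
        case APPEND:    return "append to existing cell values";
        case INCREMENT: return "atomically increment a counter column";
        case PUT:       return "insert or update cell values";
        case DELETE:    return "delete cells, versions or families";
        default:        return "unknown mutation type";
      }
    }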
8788 
8789     /**
8790      * Protobuf enum {@code MutationProto.DeleteType}
8791      */
8792     public enum DeleteType
8793         implements com.google.protobuf.ProtocolMessageEnum {
8794       /**
8795        * <code>DELETE_ONE_VERSION = 0;</code>
8796        */
8797       DELETE_ONE_VERSION(0, 0),
8798       /**
8799        * <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
8800        */
8801       DELETE_MULTIPLE_VERSIONS(1, 1),
8802       /**
8803        * <code>DELETE_FAMILY = 2;</code>
8804        */
8805       DELETE_FAMILY(2, 2),
8806       /**
8807        * <code>DELETE_FAMILY_VERSION = 3;</code>
8808        */
8809       DELETE_FAMILY_VERSION(3, 3),
8810       ;
8811 
8812       /**
8813        * <code>DELETE_ONE_VERSION = 0;</code>
8814        */
8815       public static final int DELETE_ONE_VERSION_VALUE = 0;
8816       /**
8817        * <code>DELETE_MULTIPLE_VERSIONS = 1;</code>
8818        */
8819       public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1;
8820       /**
8821        * <code>DELETE_FAMILY = 2;</code>
8822        */
8823       public static final int DELETE_FAMILY_VALUE = 2;
8824       /**
8825        * <code>DELETE_FAMILY_VERSION = 3;</code>
8826        */
8827       public static final int DELETE_FAMILY_VERSION_VALUE = 3;
8828 
8829 
8830       public final int getNumber() { return value; }
8831 
8832       public static DeleteType valueOf(int value) {
8833         switch (value) {
8834           case 0: return DELETE_ONE_VERSION;
8835           case 1: return DELETE_MULTIPLE_VERSIONS;
8836           case 2: return DELETE_FAMILY;
8837           case 3: return DELETE_FAMILY_VERSION;
8838           default: return null;
8839         }
8840       }
8841 
8842       public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
8843           internalGetValueMap() {
8844         return internalValueMap;
8845       }
8846       private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
8847           internalValueMap =
8848             new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
8849               public DeleteType findValueByNumber(int number) {
8850                 return DeleteType.valueOf(number);
8851               }
8852             };
8853 
8854       public final com.google.protobuf.Descriptors.EnumValueDescriptor
8855           getValueDescriptor() {
8856         return getDescriptor().getValues().get(index);
8857       }
8858       public final com.google.protobuf.Descriptors.EnumDescriptor
8859           getDescriptorForType() {
8860         return getDescriptor();
8861       }
8862       public static final com.google.protobuf.Descriptors.EnumDescriptor
8863           getDescriptor() {
8864         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(2);
8865       }
8866 
8867       private static final DeleteType[] VALUES = values();
8868 
8869       public static DeleteType valueOf(
8870           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
8871         if (desc.getType() != getDescriptor()) {
8872           throw new java.lang.IllegalArgumentException(
8873             "EnumValueDescriptor is not for this type.");
8874         }
8875         return VALUES[desc.getIndex()];
8876       }
8877 
8878       private final int index;
8879       private final int value;
8880 
8881       private DeleteType(int index, int value) {
8882         this.index = index;
8883         this.value = value;
8884       }
8885 
8886       // @@protoc_insertion_point(enum_scope:MutationProto.DeleteType)
8887     }
8888 
8889     public interface ColumnValueOrBuilder
8890         extends com.google.protobuf.MessageOrBuilder {
8891 
8892       // required bytes family = 1;
8893       /**
8894        * <code>required bytes family = 1;</code>
8895        */
8896       boolean hasFamily();
8897       /**
8898        * <code>required bytes family = 1;</code>
8899        */
8900       com.google.protobuf.ByteString getFamily();
8901 
8902       // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
8903       /**
8904        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8905        */
8906       java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>
8907           getQualifierValueList();
8908       /**
8909        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8910        */
8911       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index);
8912       /**
8913        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8914        */
8915       int getQualifierValueCount();
8916       /**
8917        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8918        */
8919       java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
8920           getQualifierValueOrBuilderList();
8921       /**
8922        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
8923        */
8924       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
8925           int index);
8926     }
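
    // Editor's note: an illustrative sketch, not generated code. It walks the nested
    // structure declared here: a MutationProto carries ColumnValue entries grouped by
    // family, and each ColumnValue carries QualifierValue entries holding qualifier,
    // value, timestamp and, for deletes, a DeleteType.
    private static int countQualifierValues(MutationProto mutation) {
      int total = 0;
      for (ColumnValue columnValue : mutation.getColumnValueList()) {
        total += columnValue.getQualifierValueCount();
      }
      return total;
    }
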
8927     /**
8928      * Protobuf type {@code MutationProto.ColumnValue}
8929      */
8930     public static final class ColumnValue extends
8931         com.google.protobuf.GeneratedMessage
8932         implements ColumnValueOrBuilder {
8933       // Use ColumnValue.newBuilder() to construct.
8934       private ColumnValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
8935         super(builder);
8936         this.unknownFields = builder.getUnknownFields();
8937       }
8938       private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8939 
8940       private static final ColumnValue defaultInstance;
8941       public static ColumnValue getDefaultInstance() {
8942         return defaultInstance;
8943       }
8944 
8945       public ColumnValue getDefaultInstanceForType() {
8946         return defaultInstance;
8947       }
8948 
8949       private final com.google.protobuf.UnknownFieldSet unknownFields;
8950       @java.lang.Override
8951       public final com.google.protobuf.UnknownFieldSet
8952           getUnknownFields() {
8953         return this.unknownFields;
8954       }
8955       private ColumnValue(
8956           com.google.protobuf.CodedInputStream input,
8957           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8958           throws com.google.protobuf.InvalidProtocolBufferException {
8959         initFields();
8960         int mutable_bitField0_ = 0;
8961         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
8962             com.google.protobuf.UnknownFieldSet.newBuilder();
8963         try {
8964           boolean done = false;
8965           while (!done) {
8966             int tag = input.readTag();
8967             switch (tag) {
8968               case 0:
8969                 done = true;
8970                 break;
8971               default: {
8972                 if (!parseUnknownField(input, unknownFields,
8973                                        extensionRegistry, tag)) {
8974                   done = true;
8975                 }
8976                 break;
8977               }
8978               case 10: {
8979                 bitField0_ |= 0x00000001;
8980                 family_ = input.readBytes();
8981                 break;
8982               }
8983               case 18: {
8984                 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
8985                   qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>();
8986                   mutable_bitField0_ |= 0x00000002;
8987                 }
8988                 qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry));
8989                 break;
8990               }
8991             }
8992           }
8993         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8994           throw e.setUnfinishedMessage(this);
8995         } catch (java.io.IOException e) {
8996           throw new com.google.protobuf.InvalidProtocolBufferException(
8997               e.getMessage()).setUnfinishedMessage(this);
8998         } finally {
8999           if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
9000             qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
9001           }
9002           this.unknownFields = unknownFields.build();
9003           makeExtensionsImmutable();
9004         }
9005       }
9006       public static final com.google.protobuf.Descriptors.Descriptor
9007           getDescriptor() {
9008         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor;
9009       }
9010 
9011       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9012           internalGetFieldAccessorTable() {
9013         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable
9014             .ensureFieldAccessorsInitialized(
9015                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
9016       }
9017 
9018       public static com.google.protobuf.Parser<ColumnValue> PARSER =
9019           new com.google.protobuf.AbstractParser<ColumnValue>() {
9020         public ColumnValue parsePartialFrom(
9021             com.google.protobuf.CodedInputStream input,
9022             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9023             throws com.google.protobuf.InvalidProtocolBufferException {
9024           return new ColumnValue(input, extensionRegistry);
9025         }
9026       };
9027 
9028       @java.lang.Override
9029       public com.google.protobuf.Parser<ColumnValue> getParserForType() {
9030         return PARSER;
9031       }
9032 
9033       public interface QualifierValueOrBuilder
9034           extends com.google.protobuf.MessageOrBuilder {
9035 
9036         // optional bytes qualifier = 1;
9037         /**
9038          * <code>optional bytes qualifier = 1;</code>
9039          */
9040         boolean hasQualifier();
9041         /**
9042          * <code>optional bytes qualifier = 1;</code>
9043          */
9044         com.google.protobuf.ByteString getQualifier();
9045 
9046         // optional bytes value = 2;
9047         /**
9048          * <code>optional bytes value = 2;</code>
9049          */
9050         boolean hasValue();
9051         /**
9052          * <code>optional bytes value = 2;</code>
9053          */
9054         com.google.protobuf.ByteString getValue();
9055 
9056         // optional uint64 timestamp = 3;
9057         /**
9058          * <code>optional uint64 timestamp = 3;</code>
9059          */
9060         boolean hasTimestamp();
9061         /**
9062          * <code>optional uint64 timestamp = 3;</code>
9063          */
9064         long getTimestamp();
9065 
9066         // optional .MutationProto.DeleteType delete_type = 4;
9067         /**
9068          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9069          */
9070         boolean hasDeleteType();
9071         /**
9072          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9073          */
9074         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType();
9075 
9076         // optional bytes tags = 5;
9077         /**
9078          * <code>optional bytes tags = 5;</code>
9079          */
9080         boolean hasTags();
9081         /**
9082          * <code>optional bytes tags = 5;</code>
9083          */
9084         com.google.protobuf.ByteString getTags();
9085       }
9086       /**
9087        * Protobuf type {@code MutationProto.ColumnValue.QualifierValue}
9088        */
9089       public static final class QualifierValue extends
9090           com.google.protobuf.GeneratedMessage
9091           implements QualifierValueOrBuilder {
9092         // Use QualifierValue.newBuilder() to construct.
9093         private QualifierValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
9094           super(builder);
9095           this.unknownFields = builder.getUnknownFields();
9096         }
9097         private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
9098 
9099         private static final QualifierValue defaultInstance;
9100         public static QualifierValue getDefaultInstance() {
9101           return defaultInstance;
9102         }
9103 
9104         public QualifierValue getDefaultInstanceForType() {
9105           return defaultInstance;
9106         }
9107 
9108         private final com.google.protobuf.UnknownFieldSet unknownFields;
9109         @java.lang.Override
9110         public final com.google.protobuf.UnknownFieldSet
9111             getUnknownFields() {
9112           return this.unknownFields;
9113         }
9114         private QualifierValue(
9115             com.google.protobuf.CodedInputStream input,
9116             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9117             throws com.google.protobuf.InvalidProtocolBufferException {
9118           initFields();
9119           int mutable_bitField0_ = 0;
9120           com.google.protobuf.UnknownFieldSet.Builder unknownFields =
9121               com.google.protobuf.UnknownFieldSet.newBuilder();
9122           try {
9123             boolean done = false;
9124             while (!done) {
9125               int tag = input.readTag();
9126               switch (tag) {
9127                 case 0:
9128                   done = true;
9129                   break;
9130                 default: {
9131                   if (!parseUnknownField(input, unknownFields,
9132                                          extensionRegistry, tag)) {
9133                     done = true;
9134                   }
9135                   break;
9136                 }
9137                 case 10: {
9138                   bitField0_ |= 0x00000001;
9139                   qualifier_ = input.readBytes();
9140                   break;
9141                 }
9142                 case 18: {
9143                   bitField0_ |= 0x00000002;
9144                   value_ = input.readBytes();
9145                   break;
9146                 }
9147                 case 24: {
9148                   bitField0_ |= 0x00000004;
9149                   timestamp_ = input.readUInt64();
9150                   break;
9151                 }
9152                 case 32: {
9153                   int rawValue = input.readEnum();
9154                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue);
9155                   if (value == null) {
9156                     unknownFields.mergeVarintField(4, rawValue);
9157                   } else {
9158                     bitField0_ |= 0x00000008;
9159                     deleteType_ = value;
9160                   }
9161                   break;
9162                 }
9163                 case 42: {
9164                   bitField0_ |= 0x00000010;
9165                   tags_ = input.readBytes();
9166                   break;
9167                 }
9168               }
9169             }
9170           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9171             throw e.setUnfinishedMessage(this);
9172           } catch (java.io.IOException e) {
9173             throw new com.google.protobuf.InvalidProtocolBufferException(
9174                 e.getMessage()).setUnfinishedMessage(this);
9175           } finally {
9176             this.unknownFields = unknownFields.build();
9177             makeExtensionsImmutable();
9178           }
9179         }
9180         public static final com.google.protobuf.Descriptors.Descriptor
9181             getDescriptor() {
9182           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
9183         }
9184 
9185         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9186             internalGetFieldAccessorTable() {
9187           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
9188               .ensureFieldAccessorsInitialized(
9189                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
9190         }
9191 
9192         public static com.google.protobuf.Parser<QualifierValue> PARSER =
9193             new com.google.protobuf.AbstractParser<QualifierValue>() {
9194           public QualifierValue parsePartialFrom(
9195               com.google.protobuf.CodedInputStream input,
9196               com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9197               throws com.google.protobuf.InvalidProtocolBufferException {
9198             return new QualifierValue(input, extensionRegistry);
9199           }
9200         };
9201 
9202         @java.lang.Override
9203         public com.google.protobuf.Parser<QualifierValue> getParserForType() {
9204           return PARSER;
9205         }
9206 
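        // Hand-written usage sketch (not protoc output): the static PARSER above and the
        // parseFrom(...) overloads defined further down are equivalent entry points for
        // decoding a serialized QualifierValue. The byte[] argument and the helper name
        // are illustrative assumptions, not part of the generated API.
        private static QualifierValue parseQualifierValueExample(byte[] serialized)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Parser.parseFrom(byte[]) parses the message and then verifies initialization;
          // every field of QualifierValue is optional, so that check always passes here.
          return PARSER.parseFrom(serialized);
        }
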
9207         private int bitField0_;
9208         // optional bytes qualifier = 1;
9209         public static final int QUALIFIER_FIELD_NUMBER = 1;
9210         private com.google.protobuf.ByteString qualifier_;
9211         /**
9212          * <code>optional bytes qualifier = 1;</code>
9213          */
9214         public boolean hasQualifier() {
9215           return ((bitField0_ & 0x00000001) == 0x00000001);
9216         }
9217         /**
9218          * <code>optional bytes qualifier = 1;</code>
9219          */
9220         public com.google.protobuf.ByteString getQualifier() {
9221           return qualifier_;
9222         }
9223 
9224         // optional bytes value = 2;
9225         public static final int VALUE_FIELD_NUMBER = 2;
9226         private com.google.protobuf.ByteString value_;
9227         /**
9228          * <code>optional bytes value = 2;</code>
9229          */
9230         public boolean hasValue() {
9231           return ((bitField0_ & 0x00000002) == 0x00000002);
9232         }
9233         /**
9234          * <code>optional bytes value = 2;</code>
9235          */
9236         public com.google.protobuf.ByteString getValue() {
9237           return value_;
9238         }
9239 
9240         // optional uint64 timestamp = 3;
9241         public static final int TIMESTAMP_FIELD_NUMBER = 3;
9242         private long timestamp_;
9243         /**
9244          * <code>optional uint64 timestamp = 3;</code>
9245          */
9246         public boolean hasTimestamp() {
9247           return ((bitField0_ & 0x00000004) == 0x00000004);
9248         }
9249         /**
9250          * <code>optional uint64 timestamp = 3;</code>
9251          */
9252         public long getTimestamp() {
9253           return timestamp_;
9254         }
9255 
9256         // optional .MutationProto.DeleteType delete_type = 4;
9257         public static final int DELETE_TYPE_FIELD_NUMBER = 4;
9258         private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_;
9259         /**
9260          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9261          */
9262         public boolean hasDeleteType() {
9263           return ((bitField0_ & 0x00000008) == 0x00000008);
9264         }
9265         /**
9266          * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9267          */
9268         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
9269           return deleteType_;
9270         }
9271 
9272         // optional bytes tags = 5;
9273         public static final int TAGS_FIELD_NUMBER = 5;
9274         private com.google.protobuf.ByteString tags_;
9275         /**
9276          * <code>optional bytes tags = 5;</code>
9277          */
9278         public boolean hasTags() {
9279           return ((bitField0_ & 0x00000010) == 0x00000010);
9280         }
9281         /**
9282          * <code>optional bytes tags = 5;</code>
9283          */
9284         public com.google.protobuf.ByteString getTags() {
9285           return tags_;
9286         }
9287 
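        // Hand-written sketch: bitField0_ records which optional fields were explicitly set,
        // so hasTimestamp() is what distinguishes "absent" from the default 0L returned by
        // getTimestamp(). The helper and its fallback parameter are illustrative only.
        private static long timestampOrDefault(QualifierValue qv, long fallback) {
          return qv.hasTimestamp() ? qv.getTimestamp() : fallback;
        }
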
9288         private void initFields() {
9289           qualifier_ = com.google.protobuf.ByteString.EMPTY;
9290           value_ = com.google.protobuf.ByteString.EMPTY;
9291           timestamp_ = 0L;
9292           deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9293           tags_ = com.google.protobuf.ByteString.EMPTY;
9294         }
9295         private byte memoizedIsInitialized = -1;
9296         public final boolean isInitialized() {
9297           byte isInitialized = memoizedIsInitialized;
9298           if (isInitialized != -1) return isInitialized == 1;
9299 
9300           memoizedIsInitialized = 1;
9301           return true;
9302         }
9303 
9304         public void writeTo(com.google.protobuf.CodedOutputStream output)
9305                             throws java.io.IOException {
9306           getSerializedSize();
9307           if (((bitField0_ & 0x00000001) == 0x00000001)) {
9308             output.writeBytes(1, qualifier_);
9309           }
9310           if (((bitField0_ & 0x00000002) == 0x00000002)) {
9311             output.writeBytes(2, value_);
9312           }
9313           if (((bitField0_ & 0x00000004) == 0x00000004)) {
9314             output.writeUInt64(3, timestamp_);
9315           }
9316           if (((bitField0_ & 0x00000008) == 0x00000008)) {
9317             output.writeEnum(4, deleteType_.getNumber());
9318           }
9319           if (((bitField0_ & 0x00000010) == 0x00000010)) {
9320             output.writeBytes(5, tags_);
9321           }
9322           getUnknownFields().writeTo(output);
9323         }
9324 
9325         private int memoizedSerializedSize = -1;
9326         public int getSerializedSize() {
9327           int size = memoizedSerializedSize;
9328           if (size != -1) return size;
9329 
9330           size = 0;
9331           if (((bitField0_ & 0x00000001) == 0x00000001)) {
9332             size += com.google.protobuf.CodedOutputStream
9333               .computeBytesSize(1, qualifier_);
9334           }
9335           if (((bitField0_ & 0x00000002) == 0x00000002)) {
9336             size += com.google.protobuf.CodedOutputStream
9337               .computeBytesSize(2, value_);
9338           }
9339           if (((bitField0_ & 0x00000004) == 0x00000004)) {
9340             size += com.google.protobuf.CodedOutputStream
9341               .computeUInt64Size(3, timestamp_);
9342           }
9343           if (((bitField0_ & 0x00000008) == 0x00000008)) {
9344             size += com.google.protobuf.CodedOutputStream
9345               .computeEnumSize(4, deleteType_.getNumber());
9346           }
9347           if (((bitField0_ & 0x00000010) == 0x00000010)) {
9348             size += com.google.protobuf.CodedOutputStream
9349               .computeBytesSize(5, tags_);
9350           }
9351           size += getUnknownFields().getSerializedSize();
9352           memoizedSerializedSize = size;
9353           return size;
9354         }
9355 
9356         private static final long serialVersionUID = 0L;
9357         @java.lang.Override
9358         protected java.lang.Object writeReplace()
9359             throws java.io.ObjectStreamException {
9360           return super.writeReplace();
9361         }
9362 
9363         @java.lang.Override
9364         public boolean equals(final java.lang.Object obj) {
9365           if (obj == this) {
9366            return true;
9367           }
9368           if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)) {
9369             return super.equals(obj);
9370           }
9371           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj;
9372 
9373           boolean result = true;
9374           result = result && (hasQualifier() == other.hasQualifier());
9375           if (hasQualifier()) {
9376             result = result && getQualifier()
9377                 .equals(other.getQualifier());
9378           }
9379           result = result && (hasValue() == other.hasValue());
9380           if (hasValue()) {
9381             result = result && getValue()
9382                 .equals(other.getValue());
9383           }
9384           result = result && (hasTimestamp() == other.hasTimestamp());
9385           if (hasTimestamp()) {
9386             result = result && (getTimestamp()
9387                 == other.getTimestamp());
9388           }
9389           result = result && (hasDeleteType() == other.hasDeleteType());
9390           if (hasDeleteType()) {
9391             result = result &&
9392                 (getDeleteType() == other.getDeleteType());
9393           }
9394           result = result && (hasTags() == other.hasTags());
9395           if (hasTags()) {
9396             result = result && getTags()
9397                 .equals(other.getTags());
9398           }
9399           result = result &&
9400               getUnknownFields().equals(other.getUnknownFields());
9401           return result;
9402         }
9403 
9404         private int memoizedHashCode = 0;
9405         @java.lang.Override
9406         public int hashCode() {
9407           if (memoizedHashCode != 0) {
9408             return memoizedHashCode;
9409           }
9410           int hash = 41;
9411           hash = (19 * hash) + getDescriptorForType().hashCode();
9412           if (hasQualifier()) {
9413             hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
9414             hash = (53 * hash) + getQualifier().hashCode();
9415           }
9416           if (hasValue()) {
9417             hash = (37 * hash) + VALUE_FIELD_NUMBER;
9418             hash = (53 * hash) + getValue().hashCode();
9419           }
9420           if (hasTimestamp()) {
9421             hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
9422             hash = (53 * hash) + hashLong(getTimestamp());
9423           }
9424           if (hasDeleteType()) {
9425             hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER;
9426             hash = (53 * hash) + hashEnum(getDeleteType());
9427           }
9428           if (hasTags()) {
9429             hash = (37 * hash) + TAGS_FIELD_NUMBER;
9430             hash = (53 * hash) + getTags().hashCode();
9431           }
9432           hash = (29 * hash) + getUnknownFields().hashCode();
9433           memoizedHashCode = hash;
9434           return hash;
9435         }
9436 
9437         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
9438             com.google.protobuf.ByteString data)
9439             throws com.google.protobuf.InvalidProtocolBufferException {
9440           return PARSER.parseFrom(data);
9441         }
9442         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
9443             com.google.protobuf.ByteString data,
9444             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9445             throws com.google.protobuf.InvalidProtocolBufferException {
9446           return PARSER.parseFrom(data, extensionRegistry);
9447         }
9448         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data)
9449             throws com.google.protobuf.InvalidProtocolBufferException {
9450           return PARSER.parseFrom(data);
9451         }
9452         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
9453             byte[] data,
9454             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9455             throws com.google.protobuf.InvalidProtocolBufferException {
9456           return PARSER.parseFrom(data, extensionRegistry);
9457         }
9458         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input)
9459             throws java.io.IOException {
9460           return PARSER.parseFrom(input);
9461         }
9462         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
9463             java.io.InputStream input,
9464             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9465             throws java.io.IOException {
9466           return PARSER.parseFrom(input, extensionRegistry);
9467         }
9468         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input)
9469             throws java.io.IOException {
9470           return PARSER.parseDelimitedFrom(input);
9471         }
9472         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(
9473             java.io.InputStream input,
9474             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9475             throws java.io.IOException {
9476           return PARSER.parseDelimitedFrom(input, extensionRegistry);
9477         }
9478         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
9479             com.google.protobuf.CodedInputStream input)
9480             throws java.io.IOException {
9481           return PARSER.parseFrom(input);
9482         }
9483         public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(
9484             com.google.protobuf.CodedInputStream input,
9485             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9486             throws java.io.IOException {
9487           return PARSER.parseFrom(input, extensionRegistry);
9488         }
9489 
9490         public static Builder newBuilder() { return Builder.create(); }
9491         public Builder newBuilderForType() { return newBuilder(); }
9492         public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) {
9493           return newBuilder().mergeFrom(prototype);
9494         }
9495         public Builder toBuilder() { return newBuilder(this); }
9496 
9497         @java.lang.Override
9498         protected Builder newBuilderForType(
9499             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9500           Builder builder = new Builder(parent);
9501           return builder;
9502         }
9503         /**
9504          * Protobuf type {@code MutationProto.ColumnValue.QualifierValue}
9505          */
9506         public static final class Builder extends
9507             com.google.protobuf.GeneratedMessage.Builder<Builder>
9508            implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder {
9509           public static final com.google.protobuf.Descriptors.Descriptor
9510               getDescriptor() {
9511             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
9512           }
9513 
9514           protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
9515               internalGetFieldAccessorTable() {
9516             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable
9517                 .ensureFieldAccessorsInitialized(
9518                     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class);
9519           }
9520 
9521           // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder()
9522           private Builder() {
9523             maybeForceBuilderInitialization();
9524           }
9525 
9526           private Builder(
9527               com.google.protobuf.GeneratedMessage.BuilderParent parent) {
9528             super(parent);
9529             maybeForceBuilderInitialization();
9530           }
9531           private void maybeForceBuilderInitialization() {
9532             if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
9533             }
9534           }
9535           private static Builder create() {
9536             return new Builder();
9537           }
9538 
9539           public Builder clear() {
9540             super.clear();
9541             qualifier_ = com.google.protobuf.ByteString.EMPTY;
9542             bitField0_ = (bitField0_ & ~0x00000001);
9543             value_ = com.google.protobuf.ByteString.EMPTY;
9544             bitField0_ = (bitField0_ & ~0x00000002);
9545             timestamp_ = 0L;
9546             bitField0_ = (bitField0_ & ~0x00000004);
9547             deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9548             bitField0_ = (bitField0_ & ~0x00000008);
9549             tags_ = com.google.protobuf.ByteString.EMPTY;
9550             bitField0_ = (bitField0_ & ~0x00000010);
9551             return this;
9552           }
9553 
9554           public Builder clone() {
9555             return create().mergeFrom(buildPartial());
9556           }
9557 
9558           public com.google.protobuf.Descriptors.Descriptor
9559               getDescriptorForType() {
9560             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
9561           }
9562 
9563           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() {
9564             return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance();
9565           }
9566 
9567           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() {
9568             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial();
9569             if (!result.isInitialized()) {
9570               throw newUninitializedMessageException(result);
9571             }
9572             return result;
9573           }
9574 
9575           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() {
9576             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this);
9577             int from_bitField0_ = bitField0_;
9578             int to_bitField0_ = 0;
9579             if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
9580               to_bitField0_ |= 0x00000001;
9581             }
9582             result.qualifier_ = qualifier_;
9583             if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
9584               to_bitField0_ |= 0x00000002;
9585             }
9586             result.value_ = value_;
9587             if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
9588               to_bitField0_ |= 0x00000004;
9589             }
9590             result.timestamp_ = timestamp_;
9591             if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
9592               to_bitField0_ |= 0x00000008;
9593             }
9594             result.deleteType_ = deleteType_;
9595             if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
9596               to_bitField0_ |= 0x00000010;
9597             }
9598             result.tags_ = tags_;
9599             result.bitField0_ = to_bitField0_;
9600             onBuilt();
9601             return result;
9602           }
9603 
9604           public Builder mergeFrom(com.google.protobuf.Message other) {
9605             if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) {
9606               return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other);
9607             } else {
9608               super.mergeFrom(other);
9609               return this;
9610             }
9611           }
9612 
9613           public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) {
9614             if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this;
9615             if (other.hasQualifier()) {
9616               setQualifier(other.getQualifier());
9617             }
9618             if (other.hasValue()) {
9619               setValue(other.getValue());
9620             }
9621             if (other.hasTimestamp()) {
9622               setTimestamp(other.getTimestamp());
9623             }
9624             if (other.hasDeleteType()) {
9625               setDeleteType(other.getDeleteType());
9626             }
9627             if (other.hasTags()) {
9628               setTags(other.getTags());
9629             }
9630             this.mergeUnknownFields(other.getUnknownFields());
9631             return this;
9632           }
9633 
9634           public final boolean isInitialized() {
9635             return true;
9636           }
9637 
9638           public Builder mergeFrom(
9639               com.google.protobuf.CodedInputStream input,
9640               com.google.protobuf.ExtensionRegistryLite extensionRegistry)
9641               throws java.io.IOException {
9642             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null;
9643             try {
9644               parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
9645             } catch (com.google.protobuf.InvalidProtocolBufferException e) {
9646               parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage();
9647               throw e;
9648             } finally {
9649               if (parsedMessage != null) {
9650                 mergeFrom(parsedMessage);
9651               }
9652             }
9653             return this;
9654           }
9655           private int bitField0_;
9656 
9657           // optional bytes qualifier = 1;
9658           private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
9659           /**
9660            * <code>optional bytes qualifier = 1;</code>
9661            */
9662           public boolean hasQualifier() {
9663             return ((bitField0_ & 0x00000001) == 0x00000001);
9664           }
9665           /**
9666            * <code>optional bytes qualifier = 1;</code>
9667            */
9668           public com.google.protobuf.ByteString getQualifier() {
9669             return qualifier_;
9670           }
9671           /**
9672            * <code>optional bytes qualifier = 1;</code>
9673            */
9674           public Builder setQualifier(com.google.protobuf.ByteString value) {
9675             if (value == null) {
9676               throw new NullPointerException();
9677             }
9678             bitField0_ |= 0x00000001;
9679             qualifier_ = value;
9680             onChanged();
9681             return this;
9682           }
9683           /**
9684            * <code>optional bytes qualifier = 1;</code>
9685            */
9686           public Builder clearQualifier() {
9687             bitField0_ = (bitField0_ & ~0x00000001);
9688             qualifier_ = getDefaultInstance().getQualifier();
9689             onChanged();
9690             return this;
9691           }
9692 
9693           // optional bytes value = 2;
9694           private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY;
9695           /**
9696            * <code>optional bytes value = 2;</code>
9697            */
9698           public boolean hasValue() {
9699             return ((bitField0_ & 0x00000002) == 0x00000002);
9700           }
9701           /**
9702            * <code>optional bytes value = 2;</code>
9703            */
9704           public com.google.protobuf.ByteString getValue() {
9705             return value_;
9706           }
9707           /**
9708            * <code>optional bytes value = 2;</code>
9709            */
9710           public Builder setValue(com.google.protobuf.ByteString value) {
9711             if (value == null) {
9712               throw new NullPointerException();
9713             }
9714             bitField0_ |= 0x00000002;
9715             value_ = value;
9716             onChanged();
9717             return this;
9718           }
9719           /**
9720            * <code>optional bytes value = 2;</code>
9721            */
9722           public Builder clearValue() {
9723             bitField0_ = (bitField0_ & ~0x00000002);
9724             value_ = getDefaultInstance().getValue();
9725             onChanged();
9726             return this;
9727           }
9728 
9729           // optional uint64 timestamp = 3;
9730           private long timestamp_ ;
9731           /**
9732            * <code>optional uint64 timestamp = 3;</code>
9733            */
9734           public boolean hasTimestamp() {
9735             return ((bitField0_ & 0x00000004) == 0x00000004);
9736           }
9737           /**
9738            * <code>optional uint64 timestamp = 3;</code>
9739            */
9740           public long getTimestamp() {
9741             return timestamp_;
9742           }
9743           /**
9744            * <code>optional uint64 timestamp = 3;</code>
9745            */
9746           public Builder setTimestamp(long value) {
9747             bitField0_ |= 0x00000004;
9748             timestamp_ = value;
9749             onChanged();
9750             return this;
9751           }
9752           /**
9753            * <code>optional uint64 timestamp = 3;</code>
9754            */
9755           public Builder clearTimestamp() {
9756             bitField0_ = (bitField0_ & ~0x00000004);
9757             timestamp_ = 0L;
9758             onChanged();
9759             return this;
9760           }
9761 
9762           // optional .MutationProto.DeleteType delete_type = 4;
9763           private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9764           /**
9765            * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9766            */
9767           public boolean hasDeleteType() {
9768             return ((bitField0_ & 0x00000008) == 0x00000008);
9769           }
9770           /**
9771            * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9772            */
9773           public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() {
9774             return deleteType_;
9775           }
9776           /**
9777            * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9778            */
9779           public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value) {
9780             if (value == null) {
9781               throw new NullPointerException();
9782             }
9783             bitField0_ |= 0x00000008;
9784             deleteType_ = value;
9785             onChanged();
9786             return this;
9787           }
9788           /**
9789            * <code>optional .MutationProto.DeleteType delete_type = 4;</code>
9790            */
9791           public Builder clearDeleteType() {
9792             bitField0_ = (bitField0_ & ~0x00000008);
9793             deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION;
9794             onChanged();
9795             return this;
9796           }
9797 
9798           // optional bytes tags = 5;
9799           private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY;
9800           /**
9801            * <code>optional bytes tags = 5;</code>
9802            */
9803           public boolean hasTags() {
9804             return ((bitField0_ & 0x00000010) == 0x00000010);
9805           }
9806           /**
9807            * <code>optional bytes tags = 5;</code>
9808            */
9809           public com.google.protobuf.ByteString getTags() {
9810             return tags_;
9811           }
9812           /**
9813            * <code>optional bytes tags = 5;</code>
9814            */
9815           public Builder setTags(com.google.protobuf.ByteString value) {
9816             if (value == null) {
9817               throw new NullPointerException();
9818             }
9819             bitField0_ |= 0x00000010;
9820             tags_ = value;
9821             onChanged();
9822             return this;
9823           }
9824           /**
9825            * <code>optional bytes tags = 5;</code>
9826            */
9827           public Builder clearTags() {
9828             bitField0_ = (bitField0_ & ~0x00000010);
9829             tags_ = getDefaultInstance().getTags();
9830             onChanged();
9831             return this;
9832           }
9833 
9834           // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue.QualifierValue)
9835         }
9836 
9837         static {
9838           defaultInstance = new QualifierValue(true);
9839           defaultInstance.initFields();
9840         }
9841 
9842         // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue.QualifierValue)
9843       }
9844 
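      // Hand-written usage sketch (not protoc output): assembles a ColumnValue carrying one
      // QualifierValue and round-trips it through the wire format. setFamily(...) and
      // addQualifierValue(...) are the generated Builder methods for the fields declared
      // below; they are assumed here and defined later in the Builder class.
      private static ColumnValue exampleColumnValueRoundTrip()
          throws com.google.protobuf.InvalidProtocolBufferException {
        QualifierValue qv = QualifierValue.newBuilder()
            .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
            .setValue(com.google.protobuf.ByteString.copyFromUtf8("v1"))
            .setTimestamp(1L)
            .build();
        ColumnValue cv = ColumnValue.newBuilder()
            .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf")) // 'family' is required
            .addQualifierValue(qv)
            .build();
        // parseFrom(...) re-checks isInitialized(), i.e. that the required family is present.
        return ColumnValue.parseFrom(cv.toByteString());
      }
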
9845       private int bitField0_;
9846       // required bytes family = 1;
9847       public static final int FAMILY_FIELD_NUMBER = 1;
9848       private com.google.protobuf.ByteString family_;
9849       /**
9850        * <code>required bytes family = 1;</code>
9851        */
9852       public boolean hasFamily() {
9853         return ((bitField0_ & 0x00000001) == 0x00000001);
9854       }
9855       /**
9856        * <code>required bytes family = 1;</code>
9857        */
9858       public com.google.protobuf.ByteString getFamily() {
9859         return family_;
9860       }
9861 
9862       // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
9863       public static final int QUALIFIER_VALUE_FIELD_NUMBER = 2;
9864       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_;
9865       /**
9866        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9867        */
9868       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
9869         return qualifierValue_;
9870       }
9871       /**
9872        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9873        */
9874       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
9875           getQualifierValueOrBuilderList() {
9876         return qualifierValue_;
9877       }
9878       /**
9879        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9880        */
9881       public int getQualifierValueCount() {
9882         return qualifierValue_.size();
9883       }
9884       /**
9885        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9886        */
9887       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
9888         return qualifierValue_.get(index);
9889       }
9890       /**
9891        * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
9892        */
9893       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
9894           int index) {
9895         return qualifierValue_.get(index);
9896       }
9897 
9898       private void initFields() {
9899         family_ = com.google.protobuf.ByteString.EMPTY;
9900         qualifierValue_ = java.util.Collections.emptyList();
9901       }
9902       private byte memoizedIsInitialized = -1;
9903       public final boolean isInitialized() {
9904         byte isInitialized = memoizedIsInitialized;
9905         if (isInitialized != -1) return isInitialized == 1;
9906 
9907         if (!hasFamily()) {
9908           memoizedIsInitialized = 0;
9909           return false;
9910         }
9911         memoizedIsInitialized = 1;
9912         return true;
9913       }
9914 
9915       public void writeTo(com.google.protobuf.CodedOutputStream output)
9916                           throws java.io.IOException {
9917         getSerializedSize();
9918         if (((bitField0_ & 0x00000001) == 0x00000001)) {
9919           output.writeBytes(1, family_);
9920         }
9921         for (int i = 0; i < qualifierValue_.size(); i++) {
9922           output.writeMessage(2, qualifierValue_.get(i));
9923         }
9924         getUnknownFields().writeTo(output);
9925       }
9926 
9927       private int memoizedSerializedSize = -1;
9928       public int getSerializedSize() {
9929         int size = memoizedSerializedSize;
9930         if (size != -1) return size;
9931 
9932         size = 0;
9933         if (((bitField0_ & 0x00000001) == 0x00000001)) {
9934           size += com.google.protobuf.CodedOutputStream
9935             .computeBytesSize(1, family_);
9936         }
9937         for (int i = 0; i < qualifierValue_.size(); i++) {
9938           size += com.google.protobuf.CodedOutputStream
9939             .computeMessageSize(2, qualifierValue_.get(i));
9940         }
9941         size += getUnknownFields().getSerializedSize();
9942         memoizedSerializedSize = size;
9943         return size;
9944       }
9945 
9946       private static final long serialVersionUID = 0L;
9947       @java.lang.Override
9948       protected java.lang.Object writeReplace()
9949           throws java.io.ObjectStreamException {
9950         return super.writeReplace();
9951       }
9952 
9953       @java.lang.Override
9954       public boolean equals(final java.lang.Object obj) {
9955         if (obj == this) {
9956          return true;
9957         }
9958         if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)) {
9959           return super.equals(obj);
9960         }
9961         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj;
9962 
9963         boolean result = true;
9964         result = result && (hasFamily() == other.hasFamily());
9965         if (hasFamily()) {
9966           result = result && getFamily()
9967               .equals(other.getFamily());
9968         }
9969         result = result && getQualifierValueList()
9970             .equals(other.getQualifierValueList());
9971         result = result &&
9972             getUnknownFields().equals(other.getUnknownFields());
9973         return result;
9974       }
9975 
9976       private int memoizedHashCode = 0;
9977       @java.lang.Override
9978       public int hashCode() {
9979         if (memoizedHashCode != 0) {
9980           return memoizedHashCode;
9981         }
9982         int hash = 41;
9983         hash = (19 * hash) + getDescriptorForType().hashCode();
9984         if (hasFamily()) {
9985           hash = (37 * hash) + FAMILY_FIELD_NUMBER;
9986           hash = (53 * hash) + getFamily().hashCode();
9987         }
9988         if (getQualifierValueCount() > 0) {
9989           hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER;
9990           hash = (53 * hash) + getQualifierValueList().hashCode();
9991         }
9992         hash = (29 * hash) + getUnknownFields().hashCode();
9993         memoizedHashCode = hash;
9994         return hash;
9995       }
9996 
9997       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
9998           com.google.protobuf.ByteString data)
9999           throws com.google.protobuf.InvalidProtocolBufferException {
10000         return PARSER.parseFrom(data);
10001       }
10002       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
10003           com.google.protobuf.ByteString data,
10004           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10005           throws com.google.protobuf.InvalidProtocolBufferException {
10006         return PARSER.parseFrom(data, extensionRegistry);
10007       }
10008       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data)
10009           throws com.google.protobuf.InvalidProtocolBufferException {
10010         return PARSER.parseFrom(data);
10011       }
10012       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
10013           byte[] data,
10014           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10015           throws com.google.protobuf.InvalidProtocolBufferException {
10016         return PARSER.parseFrom(data, extensionRegistry);
10017       }
10018       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input)
10019           throws java.io.IOException {
10020         return PARSER.parseFrom(input);
10021       }
10022       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
10023           java.io.InputStream input,
10024           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10025           throws java.io.IOException {
10026         return PARSER.parseFrom(input, extensionRegistry);
10027       }
10028       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input)
10029           throws java.io.IOException {
10030         return PARSER.parseDelimitedFrom(input);
10031       }
10032       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(
10033           java.io.InputStream input,
10034           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10035           throws java.io.IOException {
10036         return PARSER.parseDelimitedFrom(input, extensionRegistry);
10037       }
10038       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
10039           com.google.protobuf.CodedInputStream input)
10040           throws java.io.IOException {
10041         return PARSER.parseFrom(input);
10042       }
10043       public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(
10044           com.google.protobuf.CodedInputStream input,
10045           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10046           throws java.io.IOException {
10047         return PARSER.parseFrom(input, extensionRegistry);
10048       }
10049 
10050       public static Builder newBuilder() { return Builder.create(); }
10051       public Builder newBuilderForType() { return newBuilder(); }
10052       public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) {
10053         return newBuilder().mergeFrom(prototype);
10054       }
10055       public Builder toBuilder() { return newBuilder(this); }
10056 
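      // Hand-written sketch: toBuilder()/build() derive a modified copy of an immutable
      // ColumnValue, here swapping the family while keeping the existing qualifier_value
      // entries. setFamily(...) is the generated Builder setter (assumed, defined below);
      // the helper itself is illustrative and not part of the generated API.
      private static ColumnValue withFamily(
          ColumnValue original, com.google.protobuf.ByteString newFamily) {
        return original.toBuilder()
            .setFamily(newFamily)
            .build();
      }
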
10057       @java.lang.Override
10058       protected Builder newBuilderForType(
10059           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10060         Builder builder = new Builder(parent);
10061         return builder;
10062       }
10063       /**
10064        * Protobuf type {@code MutationProto.ColumnValue}
10065        */
10066       public static final class Builder extends
10067           com.google.protobuf.GeneratedMessage.Builder<Builder>
10068          implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder {
10069         public static final com.google.protobuf.Descriptors.Descriptor
10070             getDescriptor() {
10071           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor;
10072         }
10073 
10074         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
10075             internalGetFieldAccessorTable() {
10076           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable
10077               .ensureFieldAccessorsInitialized(
10078                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class);
10079         }
10080 
10081         // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder()
10082         private Builder() {
10083           maybeForceBuilderInitialization();
10084         }
10085 
10086         private Builder(
10087             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
10088           super(parent);
10089           maybeForceBuilderInitialization();
10090         }
10091         private void maybeForceBuilderInitialization() {
10092           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
10093             getQualifierValueFieldBuilder();
10094           }
10095         }
10096         private static Builder create() {
10097           return new Builder();
10098         }
10099 
10100         public Builder clear() {
10101           super.clear();
10102           family_ = com.google.protobuf.ByteString.EMPTY;
10103           bitField0_ = (bitField0_ & ~0x00000001);
10104           if (qualifierValueBuilder_ == null) {
10105             qualifierValue_ = java.util.Collections.emptyList();
10106             bitField0_ = (bitField0_ & ~0x00000002);
10107           } else {
10108             qualifierValueBuilder_.clear();
10109           }
10110           return this;
10111         }
10112 
10113         public Builder clone() {
10114           return create().mergeFrom(buildPartial());
10115         }
10116 
10117         public com.google.protobuf.Descriptors.Descriptor
10118             getDescriptorForType() {
10119           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor;
10120         }
10121 
10122         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() {
10123           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance();
10124         }
10125 
10126         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() {
10127           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial();
10128           if (!result.isInitialized()) {
10129             throw newUninitializedMessageException(result);
10130           }
10131           return result;
10132         }
10133 
10134         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() {
10135           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this);
10136           int from_bitField0_ = bitField0_;
10137           int to_bitField0_ = 0;
10138           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
10139             to_bitField0_ |= 0x00000001;
10140           }
10141           result.family_ = family_;
10142           if (qualifierValueBuilder_ == null) {
10143             if (((bitField0_ & 0x00000002) == 0x00000002)) {
10144               qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_);
10145               bitField0_ = (bitField0_ & ~0x00000002);
10146             }
10147             result.qualifierValue_ = qualifierValue_;
10148           } else {
10149             result.qualifierValue_ = qualifierValueBuilder_.build();
10150           }
10151           result.bitField0_ = to_bitField0_;
10152           onBuilt();
10153           return result;
10154         }
10155 
10156         public Builder mergeFrom(com.google.protobuf.Message other) {
10157           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) {
10158             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other);
10159           } else {
10160             super.mergeFrom(other);
10161             return this;
10162           }
10163         }
10164 
10165         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) {
10166           if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) return this;
10167           if (other.hasFamily()) {
10168             setFamily(other.getFamily());
10169           }
10170           if (qualifierValueBuilder_ == null) {
10171             if (!other.qualifierValue_.isEmpty()) {
10172               if (qualifierValue_.isEmpty()) {
10173                 qualifierValue_ = other.qualifierValue_;
10174                 bitField0_ = (bitField0_ & ~0x00000002);
10175               } else {
10176                 ensureQualifierValueIsMutable();
10177                 qualifierValue_.addAll(other.qualifierValue_);
10178               }
10179               onChanged();
10180             }
10181           } else {
10182             if (!other.qualifierValue_.isEmpty()) {
10183               if (qualifierValueBuilder_.isEmpty()) {
10184                 qualifierValueBuilder_.dispose();
10185                 qualifierValueBuilder_ = null;
10186                 qualifierValue_ = other.qualifierValue_;
10187                 bitField0_ = (bitField0_ & ~0x00000002);
10188                 qualifierValueBuilder_ =
10189                   com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
10190                      getQualifierValueFieldBuilder() : null;
10191               } else {
10192                 qualifierValueBuilder_.addAllMessages(other.qualifierValue_);
10193               }
10194             }
10195           }
10196           this.mergeUnknownFields(other.getUnknownFields());
10197           return this;
10198         }
10199 
10200         public final boolean isInitialized() {
10201           if (!hasFamily()) {
10202 
10203             return false;
10204           }
10205           return true;
10206         }
10207 
10208         public Builder mergeFrom(
10209             com.google.protobuf.CodedInputStream input,
10210             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10211             throws java.io.IOException {
10212           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null;
10213           try {
10214             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
10215           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
10216             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage();
10217             throw e;
10218           } finally {
10219             if (parsedMessage != null) {
10220               mergeFrom(parsedMessage);
10221             }
10222           }
10223           return this;
10224         }
10225         private int bitField0_;
10226 
10227         // required bytes family = 1;
10228         private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
10229         /**
10230          * <code>required bytes family = 1;</code>
10231          */
10232         public boolean hasFamily() {
10233           return ((bitField0_ & 0x00000001) == 0x00000001);
10234         }
10235         /**
10236          * <code>required bytes family = 1;</code>
10237          */
10238         public com.google.protobuf.ByteString getFamily() {
10239           return family_;
10240         }
10241         /**
10242          * <code>required bytes family = 1;</code>
10243          */
10244         public Builder setFamily(com.google.protobuf.ByteString value) {
10245           if (value == null) {
10246     throw new NullPointerException();
10247   }
10248   bitField0_ |= 0x00000001;
10249           family_ = value;
10250           onChanged();
10251           return this;
10252         }
10253         /**
10254          * <code>required bytes family = 1;</code>
10255          */
10256         public Builder clearFamily() {
10257           bitField0_ = (bitField0_ & ~0x00000001);
10258           family_ = getDefaultInstance().getFamily();
10259           onChanged();
10260           return this;
10261         }
10262 
10263         // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;
10264         private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_ =
10265           java.util.Collections.emptyList();
10266         private void ensureQualifierValueIsMutable() {
10267           if (!((bitField0_ & 0x00000002) == 0x00000002)) {
10268             qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(qualifierValue_);
10269             bitField0_ |= 0x00000002;
10270            }
10271         }
10272 
10273         private com.google.protobuf.RepeatedFieldBuilder<
10274             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_;
10275 
10276         /**
10277          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10278          */
10279         public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() {
10280           if (qualifierValueBuilder_ == null) {
10281             return java.util.Collections.unmodifiableList(qualifierValue_);
10282           } else {
10283             return qualifierValueBuilder_.getMessageList();
10284           }
10285         }
10286         /**
10287          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10288          */
10289         public int getQualifierValueCount() {
10290           if (qualifierValueBuilder_ == null) {
10291             return qualifierValue_.size();
10292           } else {
10293             return qualifierValueBuilder_.getCount();
10294           }
10295         }
10296         /**
10297          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10298          */
10299         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) {
10300           if (qualifierValueBuilder_ == null) {
10301             return qualifierValue_.get(index);
10302           } else {
10303             return qualifierValueBuilder_.getMessage(index);
10304           }
10305         }
10306         /**
10307          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10308          */
10309         public Builder setQualifierValue(
10310             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
10311           if (qualifierValueBuilder_ == null) {
10312             if (value == null) {
10313               throw new NullPointerException();
10314             }
10315             ensureQualifierValueIsMutable();
10316             qualifierValue_.set(index, value);
10317             onChanged();
10318           } else {
10319             qualifierValueBuilder_.setMessage(index, value);
10320           }
10321           return this;
10322         }
10323         /**
10324          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10325          */
10326         public Builder setQualifierValue(
10327             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
10328           if (qualifierValueBuilder_ == null) {
10329             ensureQualifierValueIsMutable();
10330             qualifierValue_.set(index, builderForValue.build());
10331             onChanged();
10332           } else {
10333             qualifierValueBuilder_.setMessage(index, builderForValue.build());
10334           }
10335           return this;
10336         }
10337         /**
10338          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10339          */
10340         public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
10341           if (qualifierValueBuilder_ == null) {
10342             if (value == null) {
10343               throw new NullPointerException();
10344             }
10345             ensureQualifierValueIsMutable();
10346             qualifierValue_.add(value);
10347             onChanged();
10348           } else {
10349             qualifierValueBuilder_.addMessage(value);
10350           }
10351           return this;
10352         }
10353         /**
10354          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10355          */
10356         public Builder addQualifierValue(
10357             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) {
10358           if (qualifierValueBuilder_ == null) {
10359             if (value == null) {
10360               throw new NullPointerException();
10361             }
10362             ensureQualifierValueIsMutable();
10363             qualifierValue_.add(index, value);
10364             onChanged();
10365           } else {
10366             qualifierValueBuilder_.addMessage(index, value);
10367           }
10368           return this;
10369         }
10370         /**
10371          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10372          */
10373         public Builder addQualifierValue(
10374             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
10375           if (qualifierValueBuilder_ == null) {
10376             ensureQualifierValueIsMutable();
10377             qualifierValue_.add(builderForValue.build());
10378             onChanged();
10379           } else {
10380             qualifierValueBuilder_.addMessage(builderForValue.build());
10381           }
10382           return this;
10383         }
10384         /**
10385          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10386          */
10387         public Builder addQualifierValue(
10388             int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) {
10389           if (qualifierValueBuilder_ == null) {
10390             ensureQualifierValueIsMutable();
10391             qualifierValue_.add(index, builderForValue.build());
10392             onChanged();
10393           } else {
10394             qualifierValueBuilder_.addMessage(index, builderForValue.build());
10395           }
10396           return this;
10397         }
10398         /**
10399          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10400          */
10401         public Builder addAllQualifierValue(
10402             java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values) {
10403           if (qualifierValueBuilder_ == null) {
10404             ensureQualifierValueIsMutable();
10405             super.addAll(values, qualifierValue_);
10406             onChanged();
10407           } else {
10408             qualifierValueBuilder_.addAllMessages(values);
10409           }
10410           return this;
10411         }
10412         /**
10413          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10414          */
10415         public Builder clearQualifierValue() {
10416           if (qualifierValueBuilder_ == null) {
10417             qualifierValue_ = java.util.Collections.emptyList();
10418             bitField0_ = (bitField0_ & ~0x00000002);
10419             onChanged();
10420           } else {
10421             qualifierValueBuilder_.clear();
10422           }
10423           return this;
10424         }
10425         /**
10426          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10427          */
10428         public Builder removeQualifierValue(int index) {
10429           if (qualifierValueBuilder_ == null) {
10430             ensureQualifierValueIsMutable();
10431             qualifierValue_.remove(index);
10432             onChanged();
10433           } else {
10434             qualifierValueBuilder_.remove(index);
10435           }
10436           return this;
10437         }
10438         /**
10439          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10440          */
10441         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder(
10442             int index) {
10443           return getQualifierValueFieldBuilder().getBuilder(index);
10444         }
10445         /**
10446          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10447          */
10448         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder(
10449             int index) {
10450           if (qualifierValueBuilder_ == null) {
10451             return qualifierValue_.get(index);  } else {
10452             return qualifierValueBuilder_.getMessageOrBuilder(index);
10453           }
10454         }
10455         /**
10456          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10457          */
10458         public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
10459              getQualifierValueOrBuilderList() {
10460           if (qualifierValueBuilder_ != null) {
10461             return qualifierValueBuilder_.getMessageOrBuilderList();
10462           } else {
10463             return java.util.Collections.unmodifiableList(qualifierValue_);
10464           }
10465         }
10466         /**
10467          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10468          */
10469         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() {
10470           return getQualifierValueFieldBuilder().addBuilder(
10471               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
10472         }
10473         /**
10474          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10475          */
10476         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder(
10477             int index) {
10478           return getQualifierValueFieldBuilder().addBuilder(
10479               index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance());
10480         }
10481         /**
10482          * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code>
10483          */
10484         public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder>
10485              getQualifierValueBuilderList() {
10486           return getQualifierValueFieldBuilder().getBuilderList();
10487         }
10488         private com.google.protobuf.RepeatedFieldBuilder<
10489             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>
10490             getQualifierValueFieldBuilder() {
10491           if (qualifierValueBuilder_ == null) {
10492             qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
10493                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>(
10494                     qualifierValue_,
10495                     ((bitField0_ & 0x00000002) == 0x00000002),
10496                     getParentForChildren(),
10497                     isClean());
10498             qualifierValue_ = null;
10499           }
10500           return qualifierValueBuilder_;
10501         }
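        // Editor's note (not emitted by protoc): a minimal sketch of the two ways this
        // builder exposes the repeated qualifier_value field.  addQualifierValue(...)
        // copies in a finished QualifierValue, while addQualifierValueBuilder() returns a
        // nested builder that is written through on build().  Method names follow the
        // generated code above; the qualifier/value literals are placeholders.
        private Builder exampleAddQualifierValues() {
          // Path 1: build the sub-message first, then add it.
          addQualifierValue(QualifierValue.newBuilder()
              .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
              .setValue(com.google.protobuf.ByteString.copyFromUtf8("v1"))
              .build());
          // Path 2: edit a nested builder in place; no explicit build() call is needed here.
          addQualifierValueBuilder()
              .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q2"))
              .setValue(com.google.protobuf.ByteString.copyFromUtf8("v2"));
          return this;
        }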
10502 
10503         // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue)
10504       }
10505 
10506       static {
10507         defaultInstance = new ColumnValue(true);
10508         defaultInstance.initFields();
10509       }
10510 
10511       // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue)
10512     }
10513 
10514     private int bitField0_;
10515     // optional bytes row = 1;
10516     public static final int ROW_FIELD_NUMBER = 1;
10517     private com.google.protobuf.ByteString row_;
10518     /**
10519      * <code>optional bytes row = 1;</code>
10520      */
10521     public boolean hasRow() {
10522       return ((bitField0_ & 0x00000001) == 0x00000001);
10523     }
10524     /**
10525      * <code>optional bytes row = 1;</code>
10526      */
10527     public com.google.protobuf.ByteString getRow() {
10528       return row_;
10529     }
10530 
10531     // optional .MutationProto.MutationType mutate_type = 2;
10532     public static final int MUTATE_TYPE_FIELD_NUMBER = 2;
10533     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_;
10534     /**
10535      * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
10536      */
10537     public boolean hasMutateType() {
10538       return ((bitField0_ & 0x00000002) == 0x00000002);
10539     }
10540     /**
10541      * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
10542      */
10543     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
10544       return mutateType_;
10545     }
10546 
10547     // repeated .MutationProto.ColumnValue column_value = 3;
10548     public static final int COLUMN_VALUE_FIELD_NUMBER = 3;
10549     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_;
10550     /**
10551      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
10552      */
10553     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
10554       return columnValue_;
10555     }
10556     /**
10557      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
10558      */
10559     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
10560         getColumnValueOrBuilderList() {
10561       return columnValue_;
10562     }
10563     /**
10564      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
10565      */
10566     public int getColumnValueCount() {
10567       return columnValue_.size();
10568     }
10569     /**
10570      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
10571      */
10572     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
10573       return columnValue_.get(index);
10574     }
10575     /**
10576      * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
10577      */
10578     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
10579         int index) {
10580       return columnValue_.get(index);
10581     }
10582 
10583     // optional uint64 timestamp = 4;
10584     public static final int TIMESTAMP_FIELD_NUMBER = 4;
10585     private long timestamp_;
10586     /**
10587      * <code>optional uint64 timestamp = 4;</code>
10588      */
10589     public boolean hasTimestamp() {
10590       return ((bitField0_ & 0x00000004) == 0x00000004);
10591     }
10592     /**
10593      * <code>optional uint64 timestamp = 4;</code>
10594      */
10595     public long getTimestamp() {
10596       return timestamp_;
10597     }
10598 
10599     // repeated .NameBytesPair attribute = 5;
10600     public static final int ATTRIBUTE_FIELD_NUMBER = 5;
10601     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
10602     /**
10603      * <code>repeated .NameBytesPair attribute = 5;</code>
10604      */
10605     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
10606       return attribute_;
10607     }
10608     /**
10609      * <code>repeated .NameBytesPair attribute = 5;</code>
10610      */
10611     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
10612         getAttributeOrBuilderList() {
10613       return attribute_;
10614     }
10615     /**
10616      * <code>repeated .NameBytesPair attribute = 5;</code>
10617      */
10618     public int getAttributeCount() {
10619       return attribute_.size();
10620     }
10621     /**
10622      * <code>repeated .NameBytesPair attribute = 5;</code>
10623      */
10624     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
10625       return attribute_.get(index);
10626     }
10627     /**
10628      * <code>repeated .NameBytesPair attribute = 5;</code>
10629      */
10630     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
10631         int index) {
10632       return attribute_.get(index);
10633     }
10634 
10635     // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];
10636     public static final int DURABILITY_FIELD_NUMBER = 6;
10637     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_;
10638     /**
10639      * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
10640      */
10641     public boolean hasDurability() {
10642       return ((bitField0_ & 0x00000008) == 0x00000008);
10643     }
10644     /**
10645      * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
10646      */
10647     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
10648       return durability_;
10649     }
10650 
10651     // optional .TimeRange time_range = 7;
10652     public static final int TIME_RANGE_FIELD_NUMBER = 7;
10653     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
10654     /**
10655      * <code>optional .TimeRange time_range = 7;</code>
10656      *
10657      * <pre>
10658      * For some mutations, a result may be returned, in which case,
10659      * time range can be specified for potential performance gain
10660      * </pre>
10661      */
10662     public boolean hasTimeRange() {
10663       return ((bitField0_ & 0x00000010) == 0x00000010);
10664     }
10665     /**
10666      * <code>optional .TimeRange time_range = 7;</code>
10667      *
10668      * <pre>
10669      * For some mutations, a result may be returned, in which case,
10670      * time range can be specified for potential performance gain
10671      * </pre>
10672      */
10673     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
10674       return timeRange_;
10675     }
10676     /**
10677      * <code>optional .TimeRange time_range = 7;</code>
10678      *
10679      * <pre>
10680      * For some mutations, a result may be returned, in which case,
10681      * time range can be specified for potential performance gain
10682      * </pre>
10683      */
10684     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
10685       return timeRange_;
10686     }
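    // Editor's note (not emitted by protoc): a hedged sketch of how a caller might set the
    // optional time_range documented above, so a mutation whose result is read back (for
    // example an increment) only considers cells inside the range.  Setter names follow the
    // generated builders in this file and in HBaseProtos.TimeRange; the bounds passed in are
    // placeholders.
    private static MutationProto exampleIncrementWithTimeRange(
        com.google.protobuf.ByteString row, long fromMs, long toMs) {
      return newBuilder()
          .setRow(row)
          .setMutateType(MutationType.INCREMENT)
          .setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange
              .newBuilder().setFrom(fromMs).setTo(toMs))
          .build();
    }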
10687 
10688     // optional int32 associated_cell_count = 8;
10689     public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8;
10690     private int associatedCellCount_;
10691     /**
10692      * <code>optional int32 associated_cell_count = 8;</code>
10693      *
10694      * <pre>
10695      * The below count is set when the associated cells are NOT
10696      * part of this protobuf message; they are passed alongside
10697      * and then this Message is a placeholder with metadata.  The
10698      * count is needed to know how many to peel off the block of Cells as
10699      * ours.  NOTE: This is different from the pb managed cell_count of the
10700      * 'cell' field above which is non-null when the cells are pb'd.
10701      * </pre>
10702      */
10703     public boolean hasAssociatedCellCount() {
10704       return ((bitField0_ & 0x00000020) == 0x00000020);
10705     }
10706     /**
10707      * <code>optional int32 associated_cell_count = 8;</code>
10708      *
10709      * <pre>
10710      * The below count is set when the associated cells are NOT
10711      * part of this protobuf message; they are passed alongside
10712      * and then this Message is a placeholder with metadata.  The
10713      * count is needed to know how many to peel off the block of Cells as
10714      * ours.  NOTE: This is different from the pb managed cell_count of the
10715      * 'cell' field above which is non-null when the cells are pb'd.
10716      * </pre>
10717      */
10718     public int getAssociatedCellCount() {
10719       return associatedCellCount_;
10720     }
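    // Editor's note (not emitted by protoc): when associated_cell_count is set, the cells
    // travel outside this message in the RPC cell block, and a reader peels exactly that many
    // cells off the accompanying scanner for this mutation.  Cell and CellScanner below come
    // from org.apache.hadoop.hbase and are an assumption of this sketch, not part of the
    // generated protocol code.
    private static java.util.List<org.apache.hadoop.hbase.Cell> exampleReadAssociatedCells(
        MutationProto proto, org.apache.hadoop.hbase.CellScanner cellScanner)
        throws java.io.IOException {
      int count = proto.hasAssociatedCellCount() ? proto.getAssociatedCellCount() : 0;
      java.util.List<org.apache.hadoop.hbase.Cell> cells =
          new java.util.ArrayList<org.apache.hadoop.hbase.Cell>(count);
      for (int i = 0; i < count && cellScanner.advance(); i++) {
        cells.add(cellScanner.current());
      }
      return cells;
    }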
10721 
10722     // optional uint64 nonce = 9;
10723     public static final int NONCE_FIELD_NUMBER = 9;
10724     private long nonce_;
10725     /**
10726      * <code>optional uint64 nonce = 9;</code>
10727      */
10728     public boolean hasNonce() {
10729       return ((bitField0_ & 0x00000040) == 0x00000040);
10730     }
10731     /**
10732      * <code>optional uint64 nonce = 9;</code>
10733      */
10734     public long getNonce() {
10735       return nonce_;
10736     }
10737 
10738     private void initFields() {
10739       row_ = com.google.protobuf.ByteString.EMPTY;
10740       mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
10741       columnValue_ = java.util.Collections.emptyList();
10742       timestamp_ = 0L;
10743       attribute_ = java.util.Collections.emptyList();
10744       durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
10745       timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
10746       associatedCellCount_ = 0;
10747       nonce_ = 0L;
10748     }
10749     private byte memoizedIsInitialized = -1;
10750     public final boolean isInitialized() {
10751       byte isInitialized = memoizedIsInitialized;
10752       if (isInitialized != -1) return isInitialized == 1;
10753 
10754       for (int i = 0; i < getColumnValueCount(); i++) {
10755         if (!getColumnValue(i).isInitialized()) {
10756           memoizedIsInitialized = 0;
10757           return false;
10758         }
10759       }
10760       for (int i = 0; i < getAttributeCount(); i++) {
10761         if (!getAttribute(i).isInitialized()) {
10762           memoizedIsInitialized = 0;
10763           return false;
10764         }
10765       }
10766       memoizedIsInitialized = 1;
10767       return true;
10768     }
10769 
10770     public void writeTo(com.google.protobuf.CodedOutputStream output)
10771                         throws java.io.IOException {
10772       getSerializedSize();
10773       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10774         output.writeBytes(1, row_);
10775       }
10776       if (((bitField0_ & 0x00000002) == 0x00000002)) {
10777         output.writeEnum(2, mutateType_.getNumber());
10778       }
10779       for (int i = 0; i < columnValue_.size(); i++) {
10780         output.writeMessage(3, columnValue_.get(i));
10781       }
10782       if (((bitField0_ & 0x00000004) == 0x00000004)) {
10783         output.writeUInt64(4, timestamp_);
10784       }
10785       for (int i = 0; i < attribute_.size(); i++) {
10786         output.writeMessage(5, attribute_.get(i));
10787       }
10788       if (((bitField0_ & 0x00000008) == 0x00000008)) {
10789         output.writeEnum(6, durability_.getNumber());
10790       }
10791       if (((bitField0_ & 0x00000010) == 0x00000010)) {
10792         output.writeMessage(7, timeRange_);
10793       }
10794       if (((bitField0_ & 0x00000020) == 0x00000020)) {
10795         output.writeInt32(8, associatedCellCount_);
10796       }
10797       if (((bitField0_ & 0x00000040) == 0x00000040)) {
10798         output.writeUInt64(9, nonce_);
10799       }
10800       getUnknownFields().writeTo(output);
10801     }
10802 
10803     private int memoizedSerializedSize = -1;
10804     public int getSerializedSize() {
10805       int size = memoizedSerializedSize;
10806       if (size != -1) return size;
10807 
10808       size = 0;
10809       if (((bitField0_ & 0x00000001) == 0x00000001)) {
10810         size += com.google.protobuf.CodedOutputStream
10811           .computeBytesSize(1, row_);
10812       }
10813       if (((bitField0_ & 0x00000002) == 0x00000002)) {
10814         size += com.google.protobuf.CodedOutputStream
10815           .computeEnumSize(2, mutateType_.getNumber());
10816       }
10817       for (int i = 0; i < columnValue_.size(); i++) {
10818         size += com.google.protobuf.CodedOutputStream
10819           .computeMessageSize(3, columnValue_.get(i));
10820       }
10821       if (((bitField0_ & 0x00000004) == 0x00000004)) {
10822         size += com.google.protobuf.CodedOutputStream
10823           .computeUInt64Size(4, timestamp_);
10824       }
10825       for (int i = 0; i < attribute_.size(); i++) {
10826         size += com.google.protobuf.CodedOutputStream
10827           .computeMessageSize(5, attribute_.get(i));
10828       }
10829       if (((bitField0_ & 0x00000008) == 0x00000008)) {
10830         size += com.google.protobuf.CodedOutputStream
10831           .computeEnumSize(6, durability_.getNumber());
10832       }
10833       if (((bitField0_ & 0x00000010) == 0x00000010)) {
10834         size += com.google.protobuf.CodedOutputStream
10835           .computeMessageSize(7, timeRange_);
10836       }
10837       if (((bitField0_ & 0x00000020) == 0x00000020)) {
10838         size += com.google.protobuf.CodedOutputStream
10839           .computeInt32Size(8, associatedCellCount_);
10840       }
10841       if (((bitField0_ & 0x00000040) == 0x00000040)) {
10842         size += com.google.protobuf.CodedOutputStream
10843           .computeUInt64Size(9, nonce_);
10844       }
10845       size += getUnknownFields().getSerializedSize();
10846       memoizedSerializedSize = size;
10847       return size;
10848     }
10849 
10850     private static final long serialVersionUID = 0L;
10851     @java.lang.Override
10852     protected java.lang.Object writeReplace()
10853         throws java.io.ObjectStreamException {
10854       return super.writeReplace();
10855     }
10856 
10857     @java.lang.Override
10858     public boolean equals(final java.lang.Object obj) {
10859       if (obj == this) {
10860        return true;
10861       }
10862       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)) {
10863         return super.equals(obj);
10864       }
10865       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) obj;
10866 
10867       boolean result = true;
10868       result = result && (hasRow() == other.hasRow());
10869       if (hasRow()) {
10870         result = result && getRow()
10871             .equals(other.getRow());
10872       }
10873       result = result && (hasMutateType() == other.hasMutateType());
10874       if (hasMutateType()) {
10875         result = result &&
10876             (getMutateType() == other.getMutateType());
10877       }
10878       result = result && getColumnValueList()
10879           .equals(other.getColumnValueList());
10880       result = result && (hasTimestamp() == other.hasTimestamp());
10881       if (hasTimestamp()) {
10882         result = result && (getTimestamp()
10883             == other.getTimestamp());
10884       }
10885       result = result && getAttributeList()
10886           .equals(other.getAttributeList());
10887       result = result && (hasDurability() == other.hasDurability());
10888       if (hasDurability()) {
10889         result = result &&
10890             (getDurability() == other.getDurability());
10891       }
10892       result = result && (hasTimeRange() == other.hasTimeRange());
10893       if (hasTimeRange()) {
10894         result = result && getTimeRange()
10895             .equals(other.getTimeRange());
10896       }
10897       result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
10898       if (hasAssociatedCellCount()) {
10899         result = result && (getAssociatedCellCount()
10900             == other.getAssociatedCellCount());
10901       }
10902       result = result && (hasNonce() == other.hasNonce());
10903       if (hasNonce()) {
10904         result = result && (getNonce()
10905             == other.getNonce());
10906       }
10907       result = result &&
10908           getUnknownFields().equals(other.getUnknownFields());
10909       return result;
10910     }
10911 
10912     private int memoizedHashCode = 0;
10913     @java.lang.Override
10914     public int hashCode() {
10915       if (memoizedHashCode != 0) {
10916         return memoizedHashCode;
10917       }
10918       int hash = 41;
10919       hash = (19 * hash) + getDescriptorForType().hashCode();
10920       if (hasRow()) {
10921         hash = (37 * hash) + ROW_FIELD_NUMBER;
10922         hash = (53 * hash) + getRow().hashCode();
10923       }
10924       if (hasMutateType()) {
10925         hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER;
10926         hash = (53 * hash) + hashEnum(getMutateType());
10927       }
10928       if (getColumnValueCount() > 0) {
10929         hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER;
10930         hash = (53 * hash) + getColumnValueList().hashCode();
10931       }
10932       if (hasTimestamp()) {
10933         hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
10934         hash = (53 * hash) + hashLong(getTimestamp());
10935       }
10936       if (getAttributeCount() > 0) {
10937         hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
10938         hash = (53 * hash) + getAttributeList().hashCode();
10939       }
10940       if (hasDurability()) {
10941         hash = (37 * hash) + DURABILITY_FIELD_NUMBER;
10942         hash = (53 * hash) + hashEnum(getDurability());
10943       }
10944       if (hasTimeRange()) {
10945         hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
10946         hash = (53 * hash) + getTimeRange().hashCode();
10947       }
10948       if (hasAssociatedCellCount()) {
10949         hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
10950         hash = (53 * hash) + getAssociatedCellCount();
10951       }
10952       if (hasNonce()) {
10953         hash = (37 * hash) + NONCE_FIELD_NUMBER;
10954         hash = (53 * hash) + hashLong(getNonce());
10955       }
10956       hash = (29 * hash) + getUnknownFields().hashCode();
10957       memoizedHashCode = hash;
10958       return hash;
10959     }
10960 
10961     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10962         com.google.protobuf.ByteString data)
10963         throws com.google.protobuf.InvalidProtocolBufferException {
10964       return PARSER.parseFrom(data);
10965     }
10966     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10967         com.google.protobuf.ByteString data,
10968         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10969         throws com.google.protobuf.InvalidProtocolBufferException {
10970       return PARSER.parseFrom(data, extensionRegistry);
10971     }
10972     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data)
10973         throws com.google.protobuf.InvalidProtocolBufferException {
10974       return PARSER.parseFrom(data);
10975     }
10976     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10977         byte[] data,
10978         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10979         throws com.google.protobuf.InvalidProtocolBufferException {
10980       return PARSER.parseFrom(data, extensionRegistry);
10981     }
10982     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input)
10983         throws java.io.IOException {
10984       return PARSER.parseFrom(input);
10985     }
10986     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
10987         java.io.InputStream input,
10988         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10989         throws java.io.IOException {
10990       return PARSER.parseFrom(input, extensionRegistry);
10991     }
10992     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input)
10993         throws java.io.IOException {
10994       return PARSER.parseDelimitedFrom(input);
10995     }
10996     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(
10997         java.io.InputStream input,
10998         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
10999         throws java.io.IOException {
11000       return PARSER.parseDelimitedFrom(input, extensionRegistry);
11001     }
11002     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
11003         com.google.protobuf.CodedInputStream input)
11004         throws java.io.IOException {
11005       return PARSER.parseFrom(input);
11006     }
11007     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(
11008         com.google.protobuf.CodedInputStream input,
11009         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11010         throws java.io.IOException {
11011       return PARSER.parseFrom(input, extensionRegistry);
11012     }
11013 
11014     public static Builder newBuilder() { return Builder.create(); }
11015     public Builder newBuilderForType() { return newBuilder(); }
11016     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype) {
11017       return newBuilder().mergeFrom(prototype);
11018     }
11019     public Builder toBuilder() { return newBuilder(this); }
11020 
11021     @java.lang.Override
11022     protected Builder newBuilderForType(
11023         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11024       Builder builder = new Builder(parent);
11025       return builder;
11026     }
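    // Editor's note (not emitted by protoc): a minimal sketch of assembling the kind of
    // fully filled-in mutation the Builder documented below produces -- a PUT carrying one
    // family/qualifier/value.  Builder and setter names follow the generated code in this
    // file; the row/family/qualifier/value literals are placeholders.
    private static MutationProto examplePut() {
      ColumnValue.QualifierValue qv = ColumnValue.QualifierValue.newBuilder()
          .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q"))
          .setValue(com.google.protobuf.ByteString.copyFromUtf8("v"))
          .build();
      ColumnValue cv = ColumnValue.newBuilder()
          .setFamily(com.google.protobuf.ByteString.copyFromUtf8("f"))
          .addQualifierValue(qv)
          .build();
      return newBuilder()
          .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
          .setMutateType(MutationType.PUT)
          .addColumnValue(cv)
          .build();
    }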
11027     /**
11028      * Protobuf type {@code MutationProto}
11029      *
11030      * <pre>
11031      **
11032      * A specific mutation inside a mutate request.
11033      * It can be an append, increment, put or delete based
11034      * on the mutation type.  It can be fully filled in or
11035      * only metadata present because data is being carried
11036      * elsewhere outside of pb.
11037      * </pre>
11038      */
11039     public static final class Builder extends
11040         com.google.protobuf.GeneratedMessage.Builder<Builder>
11041        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder {
11042       public static final com.google.protobuf.Descriptors.Descriptor
11043           getDescriptor() {
11044         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor;
11045       }
11046 
11047       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
11048           internalGetFieldAccessorTable() {
11049         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable
11050             .ensureFieldAccessorsInitialized(
11051                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class);
11052       }
11053 
11054       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder()
11055       private Builder() {
11056         maybeForceBuilderInitialization();
11057       }
11058 
11059       private Builder(
11060           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
11061         super(parent);
11062         maybeForceBuilderInitialization();
11063       }
11064       private void maybeForceBuilderInitialization() {
11065         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
11066           getColumnValueFieldBuilder();
11067           getAttributeFieldBuilder();
11068           getTimeRangeFieldBuilder();
11069         }
11070       }
11071       private static Builder create() {
11072         return new Builder();
11073       }
11074 
11075       public Builder clear() {
11076         super.clear();
11077         row_ = com.google.protobuf.ByteString.EMPTY;
11078         bitField0_ = (bitField0_ & ~0x00000001);
11079         mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
11080         bitField0_ = (bitField0_ & ~0x00000002);
11081         if (columnValueBuilder_ == null) {
11082           columnValue_ = java.util.Collections.emptyList();
11083           bitField0_ = (bitField0_ & ~0x00000004);
11084         } else {
11085           columnValueBuilder_.clear();
11086         }
11087         timestamp_ = 0L;
11088         bitField0_ = (bitField0_ & ~0x00000008);
11089         if (attributeBuilder_ == null) {
11090           attribute_ = java.util.Collections.emptyList();
11091           bitField0_ = (bitField0_ & ~0x00000010);
11092         } else {
11093           attributeBuilder_.clear();
11094         }
11095         durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
11096         bitField0_ = (bitField0_ & ~0x00000020);
11097         if (timeRangeBuilder_ == null) {
11098           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
11099         } else {
11100           timeRangeBuilder_.clear();
11101         }
11102         bitField0_ = (bitField0_ & ~0x00000040);
11103         associatedCellCount_ = 0;
11104         bitField0_ = (bitField0_ & ~0x00000080);
11105         nonce_ = 0L;
11106         bitField0_ = (bitField0_ & ~0x00000100);
11107         return this;
11108       }
11109 
11110       public Builder clone() {
11111         return create().mergeFrom(buildPartial());
11112       }
11113 
11114       public com.google.protobuf.Descriptors.Descriptor
11115           getDescriptorForType() {
11116         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor;
11117       }
11118 
11119       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() {
11120         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
11121       }
11122 
11123       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto build() {
11124         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial();
11125         if (!result.isInitialized()) {
11126           throw newUninitializedMessageException(result);
11127         }
11128         return result;
11129       }
11130 
11131       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildPartial() {
11132         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto(this);
11133         int from_bitField0_ = bitField0_;
11134         int to_bitField0_ = 0;
11135         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
11136           to_bitField0_ |= 0x00000001;
11137         }
11138         result.row_ = row_;
11139         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
11140           to_bitField0_ |= 0x00000002;
11141         }
11142         result.mutateType_ = mutateType_;
11143         if (columnValueBuilder_ == null) {
11144           if (((bitField0_ & 0x00000004) == 0x00000004)) {
11145             columnValue_ = java.util.Collections.unmodifiableList(columnValue_);
11146             bitField0_ = (bitField0_ & ~0x00000004);
11147           }
11148           result.columnValue_ = columnValue_;
11149         } else {
11150           result.columnValue_ = columnValueBuilder_.build();
11151         }
11152         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
11153           to_bitField0_ |= 0x00000004;
11154         }
11155         result.timestamp_ = timestamp_;
11156         if (attributeBuilder_ == null) {
11157           if (((bitField0_ & 0x00000010) == 0x00000010)) {
11158             attribute_ = java.util.Collections.unmodifiableList(attribute_);
11159             bitField0_ = (bitField0_ & ~0x00000010);
11160           }
11161           result.attribute_ = attribute_;
11162         } else {
11163           result.attribute_ = attributeBuilder_.build();
11164         }
11165         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
11166           to_bitField0_ |= 0x00000008;
11167         }
11168         result.durability_ = durability_;
11169         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
11170           to_bitField0_ |= 0x00000010;
11171         }
11172         if (timeRangeBuilder_ == null) {
11173           result.timeRange_ = timeRange_;
11174         } else {
11175           result.timeRange_ = timeRangeBuilder_.build();
11176         }
11177         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
11178           to_bitField0_ |= 0x00000020;
11179         }
11180         result.associatedCellCount_ = associatedCellCount_;
11181         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
11182           to_bitField0_ |= 0x00000040;
11183         }
11184         result.nonce_ = nonce_;
11185         result.bitField0_ = to_bitField0_;
11186         onBuilt();
11187         return result;
11188       }
11189 
11190       public Builder mergeFrom(com.google.protobuf.Message other) {
11191         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) {
11192           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)other);
11193         } else {
11194           super.mergeFrom(other);
11195           return this;
11196         }
11197       }
11198 
11199       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other) {
11200         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this;
11201         if (other.hasRow()) {
11202           setRow(other.getRow());
11203         }
11204         if (other.hasMutateType()) {
11205           setMutateType(other.getMutateType());
11206         }
11207         if (columnValueBuilder_ == null) {
11208           if (!other.columnValue_.isEmpty()) {
11209             if (columnValue_.isEmpty()) {
11210               columnValue_ = other.columnValue_;
11211               bitField0_ = (bitField0_ & ~0x00000004);
11212             } else {
11213               ensureColumnValueIsMutable();
11214               columnValue_.addAll(other.columnValue_);
11215             }
11216             onChanged();
11217           }
11218         } else {
11219           if (!other.columnValue_.isEmpty()) {
11220             if (columnValueBuilder_.isEmpty()) {
11221               columnValueBuilder_.dispose();
11222               columnValueBuilder_ = null;
11223               columnValue_ = other.columnValue_;
11224               bitField0_ = (bitField0_ & ~0x00000004);
11225               columnValueBuilder_ =
11226                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
11227                    getColumnValueFieldBuilder() : null;
11228             } else {
11229               columnValueBuilder_.addAllMessages(other.columnValue_);
11230             }
11231           }
11232         }
11233         if (other.hasTimestamp()) {
11234           setTimestamp(other.getTimestamp());
11235         }
11236         if (attributeBuilder_ == null) {
11237           if (!other.attribute_.isEmpty()) {
11238             if (attribute_.isEmpty()) {
11239               attribute_ = other.attribute_;
11240               bitField0_ = (bitField0_ & ~0x00000010);
11241             } else {
11242               ensureAttributeIsMutable();
11243               attribute_.addAll(other.attribute_);
11244             }
11245             onChanged();
11246           }
11247         } else {
11248           if (!other.attribute_.isEmpty()) {
11249             if (attributeBuilder_.isEmpty()) {
11250               attributeBuilder_.dispose();
11251               attributeBuilder_ = null;
11252               attribute_ = other.attribute_;
11253               bitField0_ = (bitField0_ & ~0x00000010);
11254               attributeBuilder_ =
11255                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
11256                    getAttributeFieldBuilder() : null;
11257             } else {
11258               attributeBuilder_.addAllMessages(other.attribute_);
11259             }
11260           }
11261         }
11262         if (other.hasDurability()) {
11263           setDurability(other.getDurability());
11264         }
11265         if (other.hasTimeRange()) {
11266           mergeTimeRange(other.getTimeRange());
11267         }
11268         if (other.hasAssociatedCellCount()) {
11269           setAssociatedCellCount(other.getAssociatedCellCount());
11270         }
11271         if (other.hasNonce()) {
11272           setNonce(other.getNonce());
11273         }
11274         this.mergeUnknownFields(other.getUnknownFields());
11275         return this;
11276       }
11277 
11278       public final boolean isInitialized() {
11279         for (int i = 0; i < getColumnValueCount(); i++) {
11280           if (!getColumnValue(i).isInitialized()) {
11281 
11282             return false;
11283           }
11284         }
11285         for (int i = 0; i < getAttributeCount(); i++) {
11286           if (!getAttribute(i).isInitialized()) {
11287 
11288             return false;
11289           }
11290         }
11291         return true;
11292       }
11293 
11294       public Builder mergeFrom(
11295           com.google.protobuf.CodedInputStream input,
11296           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
11297           throws java.io.IOException {
11298         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parsedMessage = null;
11299         try {
11300           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
11301         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
11302           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage();
11303           throw e;
11304         } finally {
11305           if (parsedMessage != null) {
11306             mergeFrom(parsedMessage);
11307           }
11308         }
11309         return this;
11310       }
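
      // Illustrative sketch (not part of the protoc output): besides the streaming
      // mergeFrom above, standard protobuf codegen also provides static parse helpers
      // on the message class itself. Assuming the usual generated signatures, and with
      // "mutation" standing for a hypothetical, previously built MutationProto:
      //
      //   byte[] bytes = mutation.toByteArray();                 // serialize
      //   ClientProtos.MutationProto copy =
      //       ClientProtos.MutationProto.parseFrom(bytes);       // deserialize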
      private int bitField0_;

      // optional bytes row = 1;
      private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes row = 1;</code>
       */
      public boolean hasRow() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional bytes row = 1;</code>
       */
      public com.google.protobuf.ByteString getRow() {
        return row_;
      }
      /**
       * <code>optional bytes row = 1;</code>
       */
      public Builder setRow(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        row_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes row = 1;</code>
       */
      public Builder clearRow() {
        bitField0_ = (bitField0_ & ~0x00000001);
        row_ = getDefaultInstance().getRow();
        onChanged();
        return this;
      }

      // optional .MutationProto.MutationType mutate_type = 2;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public boolean hasMutateType() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() {
        return mutateType_;
      }
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        mutateType_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .MutationProto.MutationType mutate_type = 2;</code>
       */
      public Builder clearMutateType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND;
        onChanged();
        return this;
      }

11385       // repeated .MutationProto.ColumnValue column_value = 3;
11386       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_ =
11387         java.util.Collections.emptyList();
ensureColumnValueIsMutable()11388       private void ensureColumnValueIsMutable() {
11389         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
11390           columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(columnValue_);
11391           bitField0_ |= 0x00000004;
11392          }
11393       }
11394 
11395       private com.google.protobuf.RepeatedFieldBuilder<
11396           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_;
11397 
11398       /**
11399        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11400        */
getColumnValueList()11401       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() {
11402         if (columnValueBuilder_ == null) {
11403           return java.util.Collections.unmodifiableList(columnValue_);
11404         } else {
11405           return columnValueBuilder_.getMessageList();
11406         }
11407       }
11408       /**
11409        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11410        */
getColumnValueCount()11411       public int getColumnValueCount() {
11412         if (columnValueBuilder_ == null) {
11413           return columnValue_.size();
11414         } else {
11415           return columnValueBuilder_.getCount();
11416         }
11417       }
11418       /**
11419        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11420        */
getColumnValue(int index)11421       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) {
11422         if (columnValueBuilder_ == null) {
11423           return columnValue_.get(index);
11424         } else {
11425           return columnValueBuilder_.getMessage(index);
11426         }
11427       }
11428       /**
11429        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11430        */
setColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value)11431       public Builder setColumnValue(
11432           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
11433         if (columnValueBuilder_ == null) {
11434           if (value == null) {
11435             throw new NullPointerException();
11436           }
11437           ensureColumnValueIsMutable();
11438           columnValue_.set(index, value);
11439           onChanged();
11440         } else {
11441           columnValueBuilder_.setMessage(index, value);
11442         }
11443         return this;
11444       }
11445       /**
11446        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11447        */
setColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue)11448       public Builder setColumnValue(
11449           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
11450         if (columnValueBuilder_ == null) {
11451           ensureColumnValueIsMutable();
11452           columnValue_.set(index, builderForValue.build());
11453           onChanged();
11454         } else {
11455           columnValueBuilder_.setMessage(index, builderForValue.build());
11456         }
11457         return this;
11458       }
11459       /**
11460        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11461        */
addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value)11462       public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
11463         if (columnValueBuilder_ == null) {
11464           if (value == null) {
11465             throw new NullPointerException();
11466           }
11467           ensureColumnValueIsMutable();
11468           columnValue_.add(value);
11469           onChanged();
11470         } else {
11471           columnValueBuilder_.addMessage(value);
11472         }
11473         return this;
11474       }
11475       /**
11476        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11477        */
addColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value)11478       public Builder addColumnValue(
11479           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) {
11480         if (columnValueBuilder_ == null) {
11481           if (value == null) {
11482             throw new NullPointerException();
11483           }
11484           ensureColumnValueIsMutable();
11485           columnValue_.add(index, value);
11486           onChanged();
11487         } else {
11488           columnValueBuilder_.addMessage(index, value);
11489         }
11490         return this;
11491       }
11492       /**
11493        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11494        */
addColumnValue( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue)11495       public Builder addColumnValue(
11496           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
11497         if (columnValueBuilder_ == null) {
11498           ensureColumnValueIsMutable();
11499           columnValue_.add(builderForValue.build());
11500           onChanged();
11501         } else {
11502           columnValueBuilder_.addMessage(builderForValue.build());
11503         }
11504         return this;
11505       }
11506       /**
11507        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11508        */
addColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue)11509       public Builder addColumnValue(
11510           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) {
11511         if (columnValueBuilder_ == null) {
11512           ensureColumnValueIsMutable();
11513           columnValue_.add(index, builderForValue.build());
11514           onChanged();
11515         } else {
11516           columnValueBuilder_.addMessage(index, builderForValue.build());
11517         }
11518         return this;
11519       }
11520       /**
11521        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11522        */
addAllColumnValue( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values)11523       public Builder addAllColumnValue(
11524           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values) {
11525         if (columnValueBuilder_ == null) {
11526           ensureColumnValueIsMutable();
11527           super.addAll(values, columnValue_);
11528           onChanged();
11529         } else {
11530           columnValueBuilder_.addAllMessages(values);
11531         }
11532         return this;
11533       }
11534       /**
11535        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11536        */
clearColumnValue()11537       public Builder clearColumnValue() {
11538         if (columnValueBuilder_ == null) {
11539           columnValue_ = java.util.Collections.emptyList();
11540           bitField0_ = (bitField0_ & ~0x00000004);
11541           onChanged();
11542         } else {
11543           columnValueBuilder_.clear();
11544         }
11545         return this;
11546       }
11547       /**
11548        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11549        */
removeColumnValue(int index)11550       public Builder removeColumnValue(int index) {
11551         if (columnValueBuilder_ == null) {
11552           ensureColumnValueIsMutable();
11553           columnValue_.remove(index);
11554           onChanged();
11555         } else {
11556           columnValueBuilder_.remove(index);
11557         }
11558         return this;
11559       }
11560       /**
11561        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11562        */
getColumnValueBuilder( int index)11563       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder(
11564           int index) {
11565         return getColumnValueFieldBuilder().getBuilder(index);
11566       }
11567       /**
11568        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11569        */
getColumnValueOrBuilder( int index)11570       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder(
11571           int index) {
11572         if (columnValueBuilder_ == null) {
11573           return columnValue_.get(index);  } else {
11574           return columnValueBuilder_.getMessageOrBuilder(index);
11575         }
11576       }
11577       /**
11578        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11579        */
11580       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
getColumnValueOrBuilderList()11581            getColumnValueOrBuilderList() {
11582         if (columnValueBuilder_ != null) {
11583           return columnValueBuilder_.getMessageOrBuilderList();
11584         } else {
11585           return java.util.Collections.unmodifiableList(columnValue_);
11586         }
11587       }
11588       /**
11589        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11590        */
addColumnValueBuilder()11591       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() {
11592         return getColumnValueFieldBuilder().addBuilder(
11593             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
11594       }
11595       /**
11596        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11597        */
addColumnValueBuilder( int index)11598       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder(
11599           int index) {
11600         return getColumnValueFieldBuilder().addBuilder(
11601             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance());
11602       }
11603       /**
11604        * <code>repeated .MutationProto.ColumnValue column_value = 3;</code>
11605        */
11606       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder>
getColumnValueBuilderList()11607            getColumnValueBuilderList() {
11608         return getColumnValueFieldBuilder().getBuilderList();
11609       }
11610       private com.google.protobuf.RepeatedFieldBuilder<
11611           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>
getColumnValueFieldBuilder()11612           getColumnValueFieldBuilder() {
11613         if (columnValueBuilder_ == null) {
11614           columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
11615               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>(
11616                   columnValue_,
11617                   ((bitField0_ & 0x00000004) == 0x00000004),
11618                   getParentForChildren(),
11619                   isClean());
11620           columnValue_ = null;
11621         }
11622         return columnValueBuilder_;
11623       }
11624 
      // optional uint64 timestamp = 4;
      private long timestamp_ ;
      /**
       * <code>optional uint64 timestamp = 4;</code>
       */
      public boolean hasTimestamp() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint64 timestamp = 4;</code>
       */
      public long getTimestamp() {
        return timestamp_;
      }
      /**
       * <code>optional uint64 timestamp = 4;</code>
       */
      public Builder setTimestamp(long value) {
        bitField0_ |= 0x00000008;
        timestamp_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 timestamp = 4;</code>
       */
      public Builder clearTimestamp() {
        bitField0_ = (bitField0_ & ~0x00000008);
        timestamp_ = 0L;
        onChanged();
        return this;
      }

11658       // repeated .NameBytesPair attribute = 5;
11659       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
11660         java.util.Collections.emptyList();
ensureAttributeIsMutable()11661       private void ensureAttributeIsMutable() {
11662         if (!((bitField0_ & 0x00000010) == 0x00000010)) {
11663           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
11664           bitField0_ |= 0x00000010;
11665          }
11666       }
11667 
11668       private com.google.protobuf.RepeatedFieldBuilder<
11669           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
11670 
11671       /**
11672        * <code>repeated .NameBytesPair attribute = 5;</code>
11673        */
getAttributeList()11674       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
11675         if (attributeBuilder_ == null) {
11676           return java.util.Collections.unmodifiableList(attribute_);
11677         } else {
11678           return attributeBuilder_.getMessageList();
11679         }
11680       }
11681       /**
11682        * <code>repeated .NameBytesPair attribute = 5;</code>
11683        */
getAttributeCount()11684       public int getAttributeCount() {
11685         if (attributeBuilder_ == null) {
11686           return attribute_.size();
11687         } else {
11688           return attributeBuilder_.getCount();
11689         }
11690       }
11691       /**
11692        * <code>repeated .NameBytesPair attribute = 5;</code>
11693        */
getAttribute(int index)11694       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
11695         if (attributeBuilder_ == null) {
11696           return attribute_.get(index);
11697         } else {
11698           return attributeBuilder_.getMessage(index);
11699         }
11700       }
11701       /**
11702        * <code>repeated .NameBytesPair attribute = 5;</code>
11703        */
setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)11704       public Builder setAttribute(
11705           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11706         if (attributeBuilder_ == null) {
11707           if (value == null) {
11708             throw new NullPointerException();
11709           }
11710           ensureAttributeIsMutable();
11711           attribute_.set(index, value);
11712           onChanged();
11713         } else {
11714           attributeBuilder_.setMessage(index, value);
11715         }
11716         return this;
11717       }
11718       /**
11719        * <code>repeated .NameBytesPair attribute = 5;</code>
11720        */
setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)11721       public Builder setAttribute(
11722           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11723         if (attributeBuilder_ == null) {
11724           ensureAttributeIsMutable();
11725           attribute_.set(index, builderForValue.build());
11726           onChanged();
11727         } else {
11728           attributeBuilder_.setMessage(index, builderForValue.build());
11729         }
11730         return this;
11731       }
11732       /**
11733        * <code>repeated .NameBytesPair attribute = 5;</code>
11734        */
addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)11735       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11736         if (attributeBuilder_ == null) {
11737           if (value == null) {
11738             throw new NullPointerException();
11739           }
11740           ensureAttributeIsMutable();
11741           attribute_.add(value);
11742           onChanged();
11743         } else {
11744           attributeBuilder_.addMessage(value);
11745         }
11746         return this;
11747       }
11748       /**
11749        * <code>repeated .NameBytesPair attribute = 5;</code>
11750        */
addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)11751       public Builder addAttribute(
11752           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
11753         if (attributeBuilder_ == null) {
11754           if (value == null) {
11755             throw new NullPointerException();
11756           }
11757           ensureAttributeIsMutable();
11758           attribute_.add(index, value);
11759           onChanged();
11760         } else {
11761           attributeBuilder_.addMessage(index, value);
11762         }
11763         return this;
11764       }
11765       /**
11766        * <code>repeated .NameBytesPair attribute = 5;</code>
11767        */
addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)11768       public Builder addAttribute(
11769           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11770         if (attributeBuilder_ == null) {
11771           ensureAttributeIsMutable();
11772           attribute_.add(builderForValue.build());
11773           onChanged();
11774         } else {
11775           attributeBuilder_.addMessage(builderForValue.build());
11776         }
11777         return this;
11778       }
11779       /**
11780        * <code>repeated .NameBytesPair attribute = 5;</code>
11781        */
addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)11782       public Builder addAttribute(
11783           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
11784         if (attributeBuilder_ == null) {
11785           ensureAttributeIsMutable();
11786           attribute_.add(index, builderForValue.build());
11787           onChanged();
11788         } else {
11789           attributeBuilder_.addMessage(index, builderForValue.build());
11790         }
11791         return this;
11792       }
11793       /**
11794        * <code>repeated .NameBytesPair attribute = 5;</code>
11795        */
addAllAttribute( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values)11796       public Builder addAllAttribute(
11797           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
11798         if (attributeBuilder_ == null) {
11799           ensureAttributeIsMutable();
11800           super.addAll(values, attribute_);
11801           onChanged();
11802         } else {
11803           attributeBuilder_.addAllMessages(values);
11804         }
11805         return this;
11806       }
11807       /**
11808        * <code>repeated .NameBytesPair attribute = 5;</code>
11809        */
clearAttribute()11810       public Builder clearAttribute() {
11811         if (attributeBuilder_ == null) {
11812           attribute_ = java.util.Collections.emptyList();
11813           bitField0_ = (bitField0_ & ~0x00000010);
11814           onChanged();
11815         } else {
11816           attributeBuilder_.clear();
11817         }
11818         return this;
11819       }
11820       /**
11821        * <code>repeated .NameBytesPair attribute = 5;</code>
11822        */
removeAttribute(int index)11823       public Builder removeAttribute(int index) {
11824         if (attributeBuilder_ == null) {
11825           ensureAttributeIsMutable();
11826           attribute_.remove(index);
11827           onChanged();
11828         } else {
11829           attributeBuilder_.remove(index);
11830         }
11831         return this;
11832       }
11833       /**
11834        * <code>repeated .NameBytesPair attribute = 5;</code>
11835        */
getAttributeBuilder( int index)11836       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
11837           int index) {
11838         return getAttributeFieldBuilder().getBuilder(index);
11839       }
11840       /**
11841        * <code>repeated .NameBytesPair attribute = 5;</code>
11842        */
getAttributeOrBuilder( int index)11843       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
11844           int index) {
11845         if (attributeBuilder_ == null) {
11846           return attribute_.get(index);  } else {
11847           return attributeBuilder_.getMessageOrBuilder(index);
11848         }
11849       }
11850       /**
11851        * <code>repeated .NameBytesPair attribute = 5;</code>
11852        */
11853       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList()11854            getAttributeOrBuilderList() {
11855         if (attributeBuilder_ != null) {
11856           return attributeBuilder_.getMessageOrBuilderList();
11857         } else {
11858           return java.util.Collections.unmodifiableList(attribute_);
11859         }
11860       }
11861       /**
11862        * <code>repeated .NameBytesPair attribute = 5;</code>
11863        */
addAttributeBuilder()11864       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
11865         return getAttributeFieldBuilder().addBuilder(
11866             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
11867       }
11868       /**
11869        * <code>repeated .NameBytesPair attribute = 5;</code>
11870        */
addAttributeBuilder( int index)11871       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
11872           int index) {
11873         return getAttributeFieldBuilder().addBuilder(
11874             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
11875       }
11876       /**
11877        * <code>repeated .NameBytesPair attribute = 5;</code>
11878        */
11879       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
getAttributeBuilderList()11880            getAttributeBuilderList() {
11881         return getAttributeFieldBuilder().getBuilderList();
11882       }
11883       private com.google.protobuf.RepeatedFieldBuilder<
11884           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeFieldBuilder()11885           getAttributeFieldBuilder() {
11886         if (attributeBuilder_ == null) {
11887           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
11888               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
11889                   attribute_,
11890                   ((bitField0_ & 0x00000010) == 0x00000010),
11891                   getParentForChildren(),
11892                   isClean());
11893           attribute_ = null;
11894         }
11895         return attributeBuilder_;
11896       }
11897 
      // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       */
      public boolean hasDurability() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() {
        return durability_;
      }
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       */
      public Builder setDurability(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        durability_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code>
       */
      public Builder clearDurability() {
        bitField0_ = (bitField0_ & ~0x00000020);
        durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT;
        onChanged();
        return this;
      }

11934       // optional .TimeRange time_range = 7;
11935       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
11936       private com.google.protobuf.SingleFieldBuilder<
11937           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
11938       /**
11939        * <code>optional .TimeRange time_range = 7;</code>
11940        *
11941        * <pre>
11942        * For some mutations, a result may be returned, in which case,
11943        * time range can be specified for potential performance gain
11944        * </pre>
11945        */
hasTimeRange()11946       public boolean hasTimeRange() {
11947         return ((bitField0_ & 0x00000040) == 0x00000040);
11948       }
11949       /**
11950        * <code>optional .TimeRange time_range = 7;</code>
11951        *
11952        * <pre>
11953        * For some mutations, a result may be returned, in which case,
11954        * time range can be specified for potential performance gain
11955        * </pre>
11956        */
getTimeRange()11957       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
11958         if (timeRangeBuilder_ == null) {
11959           return timeRange_;
11960         } else {
11961           return timeRangeBuilder_.getMessage();
11962         }
11963       }
11964       /**
11965        * <code>optional .TimeRange time_range = 7;</code>
11966        *
11967        * <pre>
11968        * For some mutations, a result may be returned, in which case,
11969        * time range can be specified for potential performance gain
11970        * </pre>
11971        */
setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)11972       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
11973         if (timeRangeBuilder_ == null) {
11974           if (value == null) {
11975             throw new NullPointerException();
11976           }
11977           timeRange_ = value;
11978           onChanged();
11979         } else {
11980           timeRangeBuilder_.setMessage(value);
11981         }
11982         bitField0_ |= 0x00000040;
11983         return this;
11984       }
11985       /**
11986        * <code>optional .TimeRange time_range = 7;</code>
11987        *
11988        * <pre>
11989        * For some mutations, a result may be returned, in which case,
11990        * time range can be specified for potential performance gain
11991        * </pre>
11992        */
setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue)11993       public Builder setTimeRange(
11994           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
11995         if (timeRangeBuilder_ == null) {
11996           timeRange_ = builderForValue.build();
11997           onChanged();
11998         } else {
11999           timeRangeBuilder_.setMessage(builderForValue.build());
12000         }
12001         bitField0_ |= 0x00000040;
12002         return this;
12003       }
12004       /**
12005        * <code>optional .TimeRange time_range = 7;</code>
12006        *
12007        * <pre>
12008        * For some mutations, a result may be returned, in which case,
12009        * time range can be specified for potential performance gain
12010        * </pre>
12011        */
mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)12012       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
12013         if (timeRangeBuilder_ == null) {
12014           if (((bitField0_ & 0x00000040) == 0x00000040) &&
12015               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
12016             timeRange_ =
12017               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
12018           } else {
12019             timeRange_ = value;
12020           }
12021           onChanged();
12022         } else {
12023           timeRangeBuilder_.mergeFrom(value);
12024         }
12025         bitField0_ |= 0x00000040;
12026         return this;
12027       }
12028       /**
12029        * <code>optional .TimeRange time_range = 7;</code>
12030        *
12031        * <pre>
12032        * For some mutations, a result may be returned, in which case,
12033        * time range can be specified for potential performance gain
12034        * </pre>
12035        */
clearTimeRange()12036       public Builder clearTimeRange() {
12037         if (timeRangeBuilder_ == null) {
12038           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
12039           onChanged();
12040         } else {
12041           timeRangeBuilder_.clear();
12042         }
12043         bitField0_ = (bitField0_ & ~0x00000040);
12044         return this;
12045       }
12046       /**
12047        * <code>optional .TimeRange time_range = 7;</code>
12048        *
12049        * <pre>
12050        * For some mutations, a result may be returned, in which case,
12051        * time range can be specified for potential performance gain
12052        * </pre>
12053        */
getTimeRangeBuilder()12054       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
12055         bitField0_ |= 0x00000040;
12056         onChanged();
12057         return getTimeRangeFieldBuilder().getBuilder();
12058       }
12059       /**
12060        * <code>optional .TimeRange time_range = 7;</code>
12061        *
12062        * <pre>
12063        * For some mutations, a result may be returned, in which case,
12064        * time range can be specified for potential performance gain
12065        * </pre>
12066        */
getTimeRangeOrBuilder()12067       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
12068         if (timeRangeBuilder_ != null) {
12069           return timeRangeBuilder_.getMessageOrBuilder();
12070         } else {
12071           return timeRange_;
12072         }
12073       }
12074       /**
12075        * <code>optional .TimeRange time_range = 7;</code>
12076        *
12077        * <pre>
12078        * For some mutations, a result may be returned, in which case,
12079        * time range can be specified for potential performance gain
12080        * </pre>
12081        */
12082       private com.google.protobuf.SingleFieldBuilder<
12083           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
getTimeRangeFieldBuilder()12084           getTimeRangeFieldBuilder() {
12085         if (timeRangeBuilder_ == null) {
12086           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
12087               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
12088                   timeRange_,
12089                   getParentForChildren(),
12090                   isClean());
12091           timeRange_ = null;
12092         }
12093         return timeRangeBuilder_;
12094       }
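
      // Illustrative sketch (not part of the protoc output): the time_range field is
      // meant for mutations that return a result, per the field comment above, so the
      // returned cells can be restricted to a time window. Assuming the generated
      // HBaseProtos.TimeRange.Builder exposes setFrom/setTo for its from/to fields,
      // and with "builder" standing for this MutationProto.Builder:
      //
      //   builder.setTimeRange(
      //       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder()
      //           .setFrom(0L)
      //           .setTo(System.currentTimeMillis())
      //           .build());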
12095 
12096       // optional int32 associated_cell_count = 8;
12097       private int associatedCellCount_ ;
12098       /**
12099        * <code>optional int32 associated_cell_count = 8;</code>
12100        *
12101        * <pre>
12102        * The below count is set when the associated cells are NOT
12103        * part of this protobuf message; they are passed alongside
12104        * and then this Message is a placeholder with metadata.  The
12105        * count is needed to know how many to peel off the block of Cells as
12106        * ours.  NOTE: This is different from the pb managed cell_count of the
12107        * 'cell' field above which is non-null when the cells are pb'd.
12108        * </pre>
12109        */
hasAssociatedCellCount()12110       public boolean hasAssociatedCellCount() {
12111         return ((bitField0_ & 0x00000080) == 0x00000080);
12112       }
12113       /**
12114        * <code>optional int32 associated_cell_count = 8;</code>
12115        *
12116        * <pre>
12117        * The below count is set when the associated cells are NOT
12118        * part of this protobuf message; they are passed alongside
12119        * and then this Message is a placeholder with metadata.  The
12120        * count is needed to know how many to peel off the block of Cells as
12121        * ours.  NOTE: This is different from the pb managed cell_count of the
12122        * 'cell' field above which is non-null when the cells are pb'd.
12123        * </pre>
12124        */
getAssociatedCellCount()12125       public int getAssociatedCellCount() {
12126         return associatedCellCount_;
12127       }
12128       /**
12129        * <code>optional int32 associated_cell_count = 8;</code>
12130        *
12131        * <pre>
12132        * The below count is set when the associated cells are NOT
12133        * part of this protobuf message; they are passed alongside
12134        * and then this Message is a placeholder with metadata.  The
12135        * count is needed to know how many to peel off the block of Cells as
12136        * ours.  NOTE: This is different from the pb managed cell_count of the
12137        * 'cell' field above which is non-null when the cells are pb'd.
12138        * </pre>
12139        */
setAssociatedCellCount(int value)12140       public Builder setAssociatedCellCount(int value) {
12141         bitField0_ |= 0x00000080;
12142         associatedCellCount_ = value;
12143         onChanged();
12144         return this;
12145       }
12146       /**
12147        * <code>optional int32 associated_cell_count = 8;</code>
12148        *
12149        * <pre>
12150        * The below count is set when the associated cells are NOT
12151        * part of this protobuf message; they are passed alongside
12152        * and then this Message is a placeholder with metadata.  The
12153        * count is needed to know how many to peel off the block of Cells as
12154        * ours.  NOTE: This is different from the pb managed cell_count of the
12155        * 'cell' field above which is non-null when the cells are pb'd.
12156        * </pre>
12157        */
clearAssociatedCellCount()12158       public Builder clearAssociatedCellCount() {
12159         bitField0_ = (bitField0_ & ~0x00000080);
12160         associatedCellCount_ = 0;
12161         onChanged();
12162         return this;
12163       }
12164 
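      // Illustrative sketch (not part of the protoc output): per the field comment above,
      // associated_cell_count is set when the cells travel outside this message (in the
      // accompanying block of Cells) and the MutationProto is only a metadata placeholder.
      // A placeholder for three externally carried cells might look like the following,
      // where "builder" stands for this MutationProto.Builder and rowBytes is a
      // hypothetical ByteString row key:
      //
      //   builder.setRow(rowBytes)
      //          .setMutateType(MutationProto.MutationType.APPEND)
      //          .setAssociatedCellCount(3);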
      // optional uint64 nonce = 9;
      private long nonce_ ;
      /**
       * <code>optional uint64 nonce = 9;</code>
       */
      public boolean hasNonce() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional uint64 nonce = 9;</code>
       */
      public long getNonce() {
        return nonce_;
      }
      /**
       * <code>optional uint64 nonce = 9;</code>
       */
      public Builder setNonce(long value) {
        bitField0_ |= 0x00000100;
        nonce_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 nonce = 9;</code>
       */
      public Builder clearNonce() {
        bitField0_ = (bitField0_ & ~0x00000100);
        nonce_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:MutationProto)
    }

    static {
      defaultInstance = new MutationProto(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:MutationProto)
  }
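
  // Illustrative sketch (not part of the protoc output): assembling a MutationProto with
  // the Builder generated above. The row key and timestamp values are hypothetical;
  // newBuilder()/build() are the standard entry points protoc generates for this class.
  //
  //   ClientProtos.MutationProto mutation = ClientProtos.MutationProto.newBuilder()
  //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
  //       .setMutateType(ClientProtos.MutationProto.MutationType.APPEND)
  //       .setDurability(ClientProtos.MutationProto.Durability.USE_DEFAULT)
  //       .setTimestamp(System.currentTimeMillis())
  //       .build();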
12208 
12209   public interface MutateRequestOrBuilder
12210       extends com.google.protobuf.MessageOrBuilder {
12211 
12212     // required .RegionSpecifier region = 1;
12213     /**
12214      * <code>required .RegionSpecifier region = 1;</code>
12215      */
hasRegion()12216     boolean hasRegion();
12217     /**
12218      * <code>required .RegionSpecifier region = 1;</code>
12219      */
getRegion()12220     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
12221     /**
12222      * <code>required .RegionSpecifier region = 1;</code>
12223      */
getRegionOrBuilder()12224     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
12225 
12226     // required .MutationProto mutation = 2;
12227     /**
12228      * <code>required .MutationProto mutation = 2;</code>
12229      */
hasMutation()12230     boolean hasMutation();
12231     /**
12232      * <code>required .MutationProto mutation = 2;</code>
12233      */
getMutation()12234     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
12235     /**
12236      * <code>required .MutationProto mutation = 2;</code>
12237      */
getMutationOrBuilder()12238     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
12239 
12240     // optional .Condition condition = 3;
12241     /**
12242      * <code>optional .Condition condition = 3;</code>
12243      */
hasCondition()12244     boolean hasCondition();
12245     /**
12246      * <code>optional .Condition condition = 3;</code>
12247      */
getCondition()12248     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
12249     /**
12250      * <code>optional .Condition condition = 3;</code>
12251      */
getConditionOrBuilder()12252     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
12253 
12254     // optional uint64 nonce_group = 4;
12255     /**
12256      * <code>optional uint64 nonce_group = 4;</code>
12257      */
hasNonceGroup()12258     boolean hasNonceGroup();
12259     /**
12260      * <code>optional uint64 nonce_group = 4;</code>
12261      */
getNonceGroup()12262     long getNonceGroup();
12263   }
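
  // Illustrative sketch (not part of the protoc output) for the MutateRequest message
  // documented just below: pairing a region specifier with a single mutation. The builder
  // setters (setRegion, setMutation, setNonceGroup) follow the standard protoc pattern
  // implied by the accessor interface above; "region" and "mutation" are hypothetical,
  // previously built messages.
  //
  //   ClientProtos.MutateRequest request = ClientProtos.MutateRequest.newBuilder()
  //       .setRegion(region)        // required .RegionSpecifier
  //       .setMutation(mutation)    // required .MutationProto
  //       .setNonceGroup(0L)        // optional
  //       .build();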
  /**
   * Protobuf type {@code MutateRequest}
   *
   * <pre>
   **
   * The mutate request. Perform a single Mutate operation.
   *
   * Optionally, you can specify a condition. The mutate
   * will take place only if the condition is met.  Otherwise,
   * the mutate will be ignored.  In the response result,
   * parameter processed is used to indicate if the mutate
   * actually happened.
   * </pre>
   */
  public static final class MutateRequest extends
      com.google.protobuf.GeneratedMessage
      implements MutateRequestOrBuilder {
    // Use MutateRequest.newBuilder() to construct.
    private MutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
12287 
12288     private static final MutateRequest defaultInstance;
getDefaultInstance()12289     public static MutateRequest getDefaultInstance() {
12290       return defaultInstance;
12291     }
12292 
getDefaultInstanceForType()12293     public MutateRequest getDefaultInstanceForType() {
12294       return defaultInstance;
12295     }
12296 
12297     private final com.google.protobuf.UnknownFieldSet unknownFields;
12298     @java.lang.Override
12299     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()12300         getUnknownFields() {
12301       return this.unknownFields;
12302     }
MutateRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12303     private MutateRequest(
12304         com.google.protobuf.CodedInputStream input,
12305         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12306         throws com.google.protobuf.InvalidProtocolBufferException {
12307       initFields();
12308       int mutable_bitField0_ = 0;
12309       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
12310           com.google.protobuf.UnknownFieldSet.newBuilder();
12311       try {
12312         boolean done = false;
12313         while (!done) {
12314           int tag = input.readTag();
12315           switch (tag) {
12316             case 0:
12317               done = true;
12318               break;
12319             default: {
12320               if (!parseUnknownField(input, unknownFields,
12321                                      extensionRegistry, tag)) {
12322                 done = true;
12323               }
12324               break;
12325             }
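            // The case values below are protobuf wire tags, (field_number << 3) | wire_type:
            // 10, 18 and 26 are the length-delimited tags for fields 1-3 (region, mutation,
            // condition); 32 is the varint tag for field 4 (nonce_group).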
12326             case 10: {
12327               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
12328               if (((bitField0_ & 0x00000001) == 0x00000001)) {
12329                 subBuilder = region_.toBuilder();
12330               }
12331               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
12332               if (subBuilder != null) {
12333                 subBuilder.mergeFrom(region_);
12334                 region_ = subBuilder.buildPartial();
12335               }
12336               bitField0_ |= 0x00000001;
12337               break;
12338             }
12339             case 18: {
12340               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
12341               if (((bitField0_ & 0x00000002) == 0x00000002)) {
12342                 subBuilder = mutation_.toBuilder();
12343               }
12344               mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
12345               if (subBuilder != null) {
12346                 subBuilder.mergeFrom(mutation_);
12347                 mutation_ = subBuilder.buildPartial();
12348               }
12349               bitField0_ |= 0x00000002;
12350               break;
12351             }
12352             case 26: {
12353               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
12354               if (((bitField0_ & 0x00000004) == 0x00000004)) {
12355                 subBuilder = condition_.toBuilder();
12356               }
12357               condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
12358               if (subBuilder != null) {
12359                 subBuilder.mergeFrom(condition_);
12360                 condition_ = subBuilder.buildPartial();
12361               }
12362               bitField0_ |= 0x00000004;
12363               break;
12364             }
12365             case 32: {
12366               bitField0_ |= 0x00000008;
12367               nonceGroup_ = input.readUInt64();
12368               break;
12369             }
12370           }
12371         }
12372       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12373         throw e.setUnfinishedMessage(this);
12374       } catch (java.io.IOException e) {
12375         throw new com.google.protobuf.InvalidProtocolBufferException(
12376             e.getMessage()).setUnfinishedMessage(this);
12377       } finally {
12378         this.unknownFields = unknownFields.build();
12379         makeExtensionsImmutable();
12380       }
12381     }
12382     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()12383         getDescriptor() {
12384       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor;
12385     }
12386 
12387     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()12388         internalGetFieldAccessorTable() {
12389       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable
12390           .ensureFieldAccessorsInitialized(
12391               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
12392     }
12393 
12394     public static com.google.protobuf.Parser<MutateRequest> PARSER =
12395         new com.google.protobuf.AbstractParser<MutateRequest>() {
12396       public MutateRequest parsePartialFrom(
12397           com.google.protobuf.CodedInputStream input,
12398           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12399           throws com.google.protobuf.InvalidProtocolBufferException {
12400         return new MutateRequest(input, extensionRegistry);
12401       }
12402     };
12403 
12404     @java.lang.Override
getParserForType()12405     public com.google.protobuf.Parser<MutateRequest> getParserForType() {
12406       return PARSER;
12407     }
12408 
12409     private int bitField0_;
12410     // required .RegionSpecifier region = 1;
12411     public static final int REGION_FIELD_NUMBER = 1;
12412     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
12413     /**
12414      * <code>required .RegionSpecifier region = 1;</code>
12415      */
hasRegion()12416     public boolean hasRegion() {
12417       return ((bitField0_ & 0x00000001) == 0x00000001);
12418     }
12419     /**
12420      * <code>required .RegionSpecifier region = 1;</code>
12421      */
getRegion()12422     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
12423       return region_;
12424     }
12425     /**
12426      * <code>required .RegionSpecifier region = 1;</code>
12427      */
getRegionOrBuilder()12428     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
12429       return region_;
12430     }
12431 
12432     // required .MutationProto mutation = 2;
12433     public static final int MUTATION_FIELD_NUMBER = 2;
12434     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
12435     /**
12436      * <code>required .MutationProto mutation = 2;</code>
12437      */
hasMutation()12438     public boolean hasMutation() {
12439       return ((bitField0_ & 0x00000002) == 0x00000002);
12440     }
12441     /**
12442      * <code>required .MutationProto mutation = 2;</code>
12443      */
getMutation()12444     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
12445       return mutation_;
12446     }
12447     /**
12448      * <code>required .MutationProto mutation = 2;</code>
12449      */
getMutationOrBuilder()12450     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
12451       return mutation_;
12452     }
12453 
12454     // optional .Condition condition = 3;
12455     public static final int CONDITION_FIELD_NUMBER = 3;
12456     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
12457     /**
12458      * <code>optional .Condition condition = 3;</code>
12459      */
hasCondition()12460     public boolean hasCondition() {
12461       return ((bitField0_ & 0x00000004) == 0x00000004);
12462     }
12463     /**
12464      * <code>optional .Condition condition = 3;</code>
12465      */
getCondition()12466     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
12467       return condition_;
12468     }
12469     /**
12470      * <code>optional .Condition condition = 3;</code>
12471      */
getConditionOrBuilder()12472     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
12473       return condition_;
12474     }
12475 
12476     // optional uint64 nonce_group = 4;
12477     public static final int NONCE_GROUP_FIELD_NUMBER = 4;
12478     private long nonceGroup_;
12479     /**
12480      * <code>optional uint64 nonce_group = 4;</code>
12481      */
hasNonceGroup()12482     public boolean hasNonceGroup() {
12483       return ((bitField0_ & 0x00000008) == 0x00000008);
12484     }
12485     /**
12486      * <code>optional uint64 nonce_group = 4;</code>
12487      */
getNonceGroup()12488     public long getNonceGroup() {
12489       return nonceGroup_;
12490     }
12491 
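    // Editorial note (illustrative, not generated code): every optional or required field in
    // this message pairs a hasXxx() presence check, backed by a bit in bitField0_, with its
    // getter. A hedged usage sketch, assuming a parsed MutateRequest named 'req':
    //
    //   if (req.hasNonceGroup()) {
    //     long nonceGroup = req.getNonceGroup();
    //   }                                          // getNonceGroup() returns 0L if unset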
initFields()12492     private void initFields() {
12493       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12494       mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12495       condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12496       nonceGroup_ = 0L;
12497     }
12498     private byte memoizedIsInitialized = -1;
isInitialized()12499     public final boolean isInitialized() {
12500       byte isInitialized = memoizedIsInitialized;
12501       if (isInitialized != -1) return isInitialized == 1;
12502 
12503       if (!hasRegion()) {
12504         memoizedIsInitialized = 0;
12505         return false;
12506       }
12507       if (!hasMutation()) {
12508         memoizedIsInitialized = 0;
12509         return false;
12510       }
12511       if (!getRegion().isInitialized()) {
12512         memoizedIsInitialized = 0;
12513         return false;
12514       }
12515       if (!getMutation().isInitialized()) {
12516         memoizedIsInitialized = 0;
12517         return false;
12518       }
12519       if (hasCondition()) {
12520         if (!getCondition().isInitialized()) {
12521           memoizedIsInitialized = 0;
12522           return false;
12523         }
12524       }
12525       memoizedIsInitialized = 1;
12526       return true;
12527     }
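    // Editorial note (illustrative): 'region' and 'mutation' are required fields, so the
    // checks above drive both Builder.build() (which throws for uninitialized messages) and
    // callers that validate defensively. A hedged sketch, with 'req' assumed:
    //
    //   if (!req.isInitialized()) {
    //     // missing region/mutation, or an uninitialized nested message -- reject the request
    //   }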
12528 
12529     public void writeTo(com.google.protobuf.CodedOutputStream output)
12530                         throws java.io.IOException {
12531       getSerializedSize();
12532       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12533         output.writeMessage(1, region_);
12534       }
12535       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12536         output.writeMessage(2, mutation_);
12537       }
12538       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12539         output.writeMessage(3, condition_);
12540       }
12541       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12542         output.writeUInt64(4, nonceGroup_);
12543       }
12544       getUnknownFields().writeTo(output);
12545     }
12546 
12547     private int memoizedSerializedSize = -1;
getSerializedSize()12548     public int getSerializedSize() {
12549       int size = memoizedSerializedSize;
12550       if (size != -1) return size;
12551 
12552       size = 0;
12553       if (((bitField0_ & 0x00000001) == 0x00000001)) {
12554         size += com.google.protobuf.CodedOutputStream
12555           .computeMessageSize(1, region_);
12556       }
12557       if (((bitField0_ & 0x00000002) == 0x00000002)) {
12558         size += com.google.protobuf.CodedOutputStream
12559           .computeMessageSize(2, mutation_);
12560       }
12561       if (((bitField0_ & 0x00000004) == 0x00000004)) {
12562         size += com.google.protobuf.CodedOutputStream
12563           .computeMessageSize(3, condition_);
12564       }
12565       if (((bitField0_ & 0x00000008) == 0x00000008)) {
12566         size += com.google.protobuf.CodedOutputStream
12567           .computeUInt64Size(4, nonceGroup_);
12568       }
12569       size += getUnknownFields().getSerializedSize();
12570       memoizedSerializedSize = size;
12571       return size;
12572     }
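    // Editorial note (illustrative): a minimal serialization sketch, assuming the standard
    // protobuf 2.x runtime helpers available on every generated message:
    //
    //   byte[] wire = req.toByteArray();        // wire.length == req.getSerializedSize()
    //   req.writeTo(codedOutputStream);         // or stream directly, as implemented above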
12573 
12574     private static final long serialVersionUID = 0L;
12575     @java.lang.Override
writeReplace()12576     protected java.lang.Object writeReplace()
12577         throws java.io.ObjectStreamException {
12578       return super.writeReplace();
12579     }
12580 
12581     @java.lang.Override
12582     public boolean equals(final java.lang.Object obj) {
12583       if (obj == this) {
12584        return true;
12585       }
12586       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) {
12587         return super.equals(obj);
12588       }
12589       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj;
12590 
12591       boolean result = true;
12592       result = result && (hasRegion() == other.hasRegion());
12593       if (hasRegion()) {
12594         result = result && getRegion()
12595             .equals(other.getRegion());
12596       }
12597       result = result && (hasMutation() == other.hasMutation());
12598       if (hasMutation()) {
12599         result = result && getMutation()
12600             .equals(other.getMutation());
12601       }
12602       result = result && (hasCondition() == other.hasCondition());
12603       if (hasCondition()) {
12604         result = result && getCondition()
12605             .equals(other.getCondition());
12606       }
12607       result = result && (hasNonceGroup() == other.hasNonceGroup());
12608       if (hasNonceGroup()) {
12609         result = result && (getNonceGroup()
12610             == other.getNonceGroup());
12611       }
12612       result = result &&
12613           getUnknownFields().equals(other.getUnknownFields());
12614       return result;
12615     }
12616 
12617     private int memoizedHashCode = 0;
12618     @java.lang.Override
hashCode()12619     public int hashCode() {
12620       if (memoizedHashCode != 0) {
12621         return memoizedHashCode;
12622       }
12623       int hash = 41;
12624       hash = (19 * hash) + getDescriptorForType().hashCode();
12625       if (hasRegion()) {
12626         hash = (37 * hash) + REGION_FIELD_NUMBER;
12627         hash = (53 * hash) + getRegion().hashCode();
12628       }
12629       if (hasMutation()) {
12630         hash = (37 * hash) + MUTATION_FIELD_NUMBER;
12631         hash = (53 * hash) + getMutation().hashCode();
12632       }
12633       if (hasCondition()) {
12634         hash = (37 * hash) + CONDITION_FIELD_NUMBER;
12635         hash = (53 * hash) + getCondition().hashCode();
12636       }
12637       if (hasNonceGroup()) {
12638         hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER;
12639         hash = (53 * hash) + hashLong(getNonceGroup());
12640       }
12641       hash = (29 * hash) + getUnknownFields().hashCode();
12642       memoizedHashCode = hash;
12643       return hash;
12644     }
12645 
12646     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12647         com.google.protobuf.ByteString data)
12648         throws com.google.protobuf.InvalidProtocolBufferException {
12649       return PARSER.parseFrom(data);
12650     }
12651     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12652         com.google.protobuf.ByteString data,
12653         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12654         throws com.google.protobuf.InvalidProtocolBufferException {
12655       return PARSER.parseFrom(data, extensionRegistry);
12656     }
12657     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data)
12658         throws com.google.protobuf.InvalidProtocolBufferException {
12659       return PARSER.parseFrom(data);
12660     }
12661     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12662         byte[] data,
12663         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12664         throws com.google.protobuf.InvalidProtocolBufferException {
12665       return PARSER.parseFrom(data, extensionRegistry);
12666     }
12667     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input)
12668         throws java.io.IOException {
12669       return PARSER.parseFrom(input);
12670     }
12671     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12672         java.io.InputStream input,
12673         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12674         throws java.io.IOException {
12675       return PARSER.parseFrom(input, extensionRegistry);
12676     }
12677     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input)
12678         throws java.io.IOException {
12679       return PARSER.parseDelimitedFrom(input);
12680     }
12681     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(
12682         java.io.InputStream input,
12683         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12684         throws java.io.IOException {
12685       return PARSER.parseDelimitedFrom(input, extensionRegistry);
12686     }
12687     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12688         com.google.protobuf.CodedInputStream input)
12689         throws java.io.IOException {
12690       return PARSER.parseFrom(input);
12691     }
12692     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(
12693         com.google.protobuf.CodedInputStream input,
12694         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12695         throws java.io.IOException {
12696       return PARSER.parseFrom(input, extensionRegistry);
12697     }
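    // Editorial note (illustrative): the static overloads above all delegate to PARSER.
    // A hedged parsing sketch:
    //
    //   MutateRequest req = MutateRequest.parseFrom(wireBytes);
    //   // throws InvalidProtocolBufferException on malformed or truncated input;
    //   // parseDelimitedFrom(inputStream) reads one length-prefixed message from a stream.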
12698 
12699     public static Builder newBuilder() { return Builder.create(); }
12700     public Builder newBuilderForType() { return newBuilder(); }
12701     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) {
12702       return newBuilder().mergeFrom(prototype);
12703     }
12704     public Builder toBuilder() { return newBuilder(this); }
12705 
12706     @java.lang.Override
12707     protected Builder newBuilderForType(
12708         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12709       Builder builder = new Builder(parent);
12710       return builder;
12711     }
12712     /**
12713      * Protobuf type {@code MutateRequest}
12714      *
12715      * <pre>
12716      **
12717      * The mutate request. Perform a single Mutate operation.
12718      *
12719      * Optionally, a condition can be specified: the mutate
12720      * is applied only if the condition is met, and is
12721      * otherwise ignored. In the response, the 'processed'
12722      * field indicates whether the mutate actually
12723      * happened.
12724      * </pre>
12725      */
12726     public static final class Builder extends
12727         com.google.protobuf.GeneratedMessage.Builder<Builder>
12728        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder {
12729       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()12730           getDescriptor() {
12731         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor;
12732       }
12733 
12734       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()12735           internalGetFieldAccessorTable() {
12736         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable
12737             .ensureFieldAccessorsInitialized(
12738                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class);
12739       }
12740 
12741       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder()
Builder()12742       private Builder() {
12743         maybeForceBuilderInitialization();
12744       }
12745 
12746       private Builder(
12747           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
12748         super(parent);
12749         maybeForceBuilderInitialization();
12750       }
maybeForceBuilderInitialization()12751       private void maybeForceBuilderInitialization() {
12752         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
12753           getRegionFieldBuilder();
12754           getMutationFieldBuilder();
12755           getConditionFieldBuilder();
12756         }
12757       }
create()12758       private static Builder create() {
12759         return new Builder();
12760       }
12761 
clear()12762       public Builder clear() {
12763         super.clear();
12764         if (regionBuilder_ == null) {
12765           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12766         } else {
12767           regionBuilder_.clear();
12768         }
12769         bitField0_ = (bitField0_ & ~0x00000001);
12770         if (mutationBuilder_ == null) {
12771           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
12772         } else {
12773           mutationBuilder_.clear();
12774         }
12775         bitField0_ = (bitField0_ & ~0x00000002);
12776         if (conditionBuilder_ == null) {
12777           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
12778         } else {
12779           conditionBuilder_.clear();
12780         }
12781         bitField0_ = (bitField0_ & ~0x00000004);
12782         nonceGroup_ = 0L;
12783         bitField0_ = (bitField0_ & ~0x00000008);
12784         return this;
12785       }
12786 
clone()12787       public Builder clone() {
12788         return create().mergeFrom(buildPartial());
12789       }
12790 
12791       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()12792           getDescriptorForType() {
12793         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor;
12794       }
12795 
getDefaultInstanceForType()12796       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() {
12797         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
12798       }
12799 
build()12800       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() {
12801         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial();
12802         if (!result.isInitialized()) {
12803           throw newUninitializedMessageException(result);
12804         }
12805         return result;
12806       }
12807 
buildPartial()12808       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() {
12809         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this);
12810         int from_bitField0_ = bitField0_;
12811         int to_bitField0_ = 0;
12812         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
12813           to_bitField0_ |= 0x00000001;
12814         }
12815         if (regionBuilder_ == null) {
12816           result.region_ = region_;
12817         } else {
12818           result.region_ = regionBuilder_.build();
12819         }
12820         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
12821           to_bitField0_ |= 0x00000002;
12822         }
12823         if (mutationBuilder_ == null) {
12824           result.mutation_ = mutation_;
12825         } else {
12826           result.mutation_ = mutationBuilder_.build();
12827         }
12828         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
12829           to_bitField0_ |= 0x00000004;
12830         }
12831         if (conditionBuilder_ == null) {
12832           result.condition_ = condition_;
12833         } else {
12834           result.condition_ = conditionBuilder_.build();
12835         }
12836         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
12837           to_bitField0_ |= 0x00000008;
12838         }
12839         result.nonceGroup_ = nonceGroup_;
12840         result.bitField0_ = to_bitField0_;
12841         onBuilt();
12842         return result;
12843       }
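      // Editorial note: build() above rejects uninitialized messages via
      // newUninitializedMessageException, while buildPartial() performs no such check;
      // the mergeFrom(CodedInputStream, ...) override below uses the partially parsed
      // message from a failed parse for exactly that reason.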
12844 
mergeFrom(com.google.protobuf.Message other)12845       public Builder mergeFrom(com.google.protobuf.Message other) {
12846         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) {
12847           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other);
12848         } else {
12849           super.mergeFrom(other);
12850           return this;
12851         }
12852       }
12853 
12854       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) {
12855         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this;
12856         if (other.hasRegion()) {
12857           mergeRegion(other.getRegion());
12858         }
12859         if (other.hasMutation()) {
12860           mergeMutation(other.getMutation());
12861         }
12862         if (other.hasCondition()) {
12863           mergeCondition(other.getCondition());
12864         }
12865         if (other.hasNonceGroup()) {
12866           setNonceGroup(other.getNonceGroup());
12867         }
12868         this.mergeUnknownFields(other.getUnknownFields());
12869         return this;
12870       }
12871 
isInitialized()12872       public final boolean isInitialized() {
12873         if (!hasRegion()) {
12874 
12875           return false;
12876         }
12877         if (!hasMutation()) {
12878 
12879           return false;
12880         }
12881         if (!getRegion().isInitialized()) {
12882 
12883           return false;
12884         }
12885         if (!getMutation().isInitialized()) {
12886 
12887           return false;
12888         }
12889         if (hasCondition()) {
12890           if (!getCondition().isInitialized()) {
12891 
12892             return false;
12893           }
12894         }
12895         return true;
12896       }
12897 
12898       public Builder mergeFrom(
12899           com.google.protobuf.CodedInputStream input,
12900           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
12901           throws java.io.IOException {
12902         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null;
12903         try {
12904           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
12905         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
12906           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage();
12907           throw e;
12908         } finally {
12909           if (parsedMessage != null) {
12910             mergeFrom(parsedMessage);
12911           }
12912         }
12913         return this;
12914       }
12915       private int bitField0_;
12916 
12917       // required .RegionSpecifier region = 1;
12918       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12919       private com.google.protobuf.SingleFieldBuilder<
12920           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
12921       /**
12922        * <code>required .RegionSpecifier region = 1;</code>
12923        */
hasRegion()12924       public boolean hasRegion() {
12925         return ((bitField0_ & 0x00000001) == 0x00000001);
12926       }
12927       /**
12928        * <code>required .RegionSpecifier region = 1;</code>
12929        */
getRegion()12930       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
12931         if (regionBuilder_ == null) {
12932           return region_;
12933         } else {
12934           return regionBuilder_.getMessage();
12935         }
12936       }
12937       /**
12938        * <code>required .RegionSpecifier region = 1;</code>
12939        */
12940       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
12941         if (regionBuilder_ == null) {
12942           if (value == null) {
12943             throw new NullPointerException();
12944           }
12945           region_ = value;
12946           onChanged();
12947         } else {
12948           regionBuilder_.setMessage(value);
12949         }
12950         bitField0_ |= 0x00000001;
12951         return this;
12952       }
12953       /**
12954        * <code>required .RegionSpecifier region = 1;</code>
12955        */
12956       public Builder setRegion(
12957           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
12958         if (regionBuilder_ == null) {
12959           region_ = builderForValue.build();
12960           onChanged();
12961         } else {
12962           regionBuilder_.setMessage(builderForValue.build());
12963         }
12964         bitField0_ |= 0x00000001;
12965         return this;
12966       }
12967       /**
12968        * <code>required .RegionSpecifier region = 1;</code>
12969        */
12970       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
12971         if (regionBuilder_ == null) {
12972           if (((bitField0_ & 0x00000001) == 0x00000001) &&
12973               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
12974             region_ =
12975               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
12976           } else {
12977             region_ = value;
12978           }
12979           onChanged();
12980         } else {
12981           regionBuilder_.mergeFrom(value);
12982         }
12983         bitField0_ |= 0x00000001;
12984         return this;
12985       }
12986       /**
12987        * <code>required .RegionSpecifier region = 1;</code>
12988        */
clearRegion()12989       public Builder clearRegion() {
12990         if (regionBuilder_ == null) {
12991           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
12992           onChanged();
12993         } else {
12994           regionBuilder_.clear();
12995         }
12996         bitField0_ = (bitField0_ & ~0x00000001);
12997         return this;
12998       }
12999       /**
13000        * <code>required .RegionSpecifier region = 1;</code>
13001        */
getRegionBuilder()13002       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
13003         bitField0_ |= 0x00000001;
13004         onChanged();
13005         return getRegionFieldBuilder().getBuilder();
13006       }
13007       /**
13008        * <code>required .RegionSpecifier region = 1;</code>
13009        */
getRegionOrBuilder()13010       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
13011         if (regionBuilder_ != null) {
13012           return regionBuilder_.getMessageOrBuilder();
13013         } else {
13014           return region_;
13015         }
13016       }
13017       /**
13018        * <code>required .RegionSpecifier region = 1;</code>
13019        */
13020       private com.google.protobuf.SingleFieldBuilder<
13021           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder()13022           getRegionFieldBuilder() {
13023         if (regionBuilder_ == null) {
13024           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13025               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
13026                   region_,
13027                   getParentForChildren(),
13028                   isClean());
13029           region_ = null;
13030         }
13031         return regionBuilder_;
13032       }
13033 
13034       // required .MutationProto mutation = 2;
13035       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
13036       private com.google.protobuf.SingleFieldBuilder<
13037           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
13038       /**
13039        * <code>required .MutationProto mutation = 2;</code>
13040        */
hasMutation()13041       public boolean hasMutation() {
13042         return ((bitField0_ & 0x00000002) == 0x00000002);
13043       }
13044       /**
13045        * <code>required .MutationProto mutation = 2;</code>
13046        */
getMutation()13047       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
13048         if (mutationBuilder_ == null) {
13049           return mutation_;
13050         } else {
13051           return mutationBuilder_.getMessage();
13052         }
13053       }
13054       /**
13055        * <code>required .MutationProto mutation = 2;</code>
13056        */
13057       public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
13058         if (mutationBuilder_ == null) {
13059           if (value == null) {
13060             throw new NullPointerException();
13061           }
13062           mutation_ = value;
13063           onChanged();
13064         } else {
13065           mutationBuilder_.setMessage(value);
13066         }
13067         bitField0_ |= 0x00000002;
13068         return this;
13069       }
13070       /**
13071        * <code>required .MutationProto mutation = 2;</code>
13072        */
13073       public Builder setMutation(
13074           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
13075         if (mutationBuilder_ == null) {
13076           mutation_ = builderForValue.build();
13077           onChanged();
13078         } else {
13079           mutationBuilder_.setMessage(builderForValue.build());
13080         }
13081         bitField0_ |= 0x00000002;
13082         return this;
13083       }
13084       /**
13085        * <code>required .MutationProto mutation = 2;</code>
13086        */
13087       public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
13088         if (mutationBuilder_ == null) {
13089           if (((bitField0_ & 0x00000002) == 0x00000002) &&
13090               mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
13091             mutation_ =
13092               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
13093           } else {
13094             mutation_ = value;
13095           }
13096           onChanged();
13097         } else {
13098           mutationBuilder_.mergeFrom(value);
13099         }
13100         bitField0_ |= 0x00000002;
13101         return this;
13102       }
13103       /**
13104        * <code>required .MutationProto mutation = 2;</code>
13105        */
clearMutation()13106       public Builder clearMutation() {
13107         if (mutationBuilder_ == null) {
13108           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
13109           onChanged();
13110         } else {
13111           mutationBuilder_.clear();
13112         }
13113         bitField0_ = (bitField0_ & ~0x00000002);
13114         return this;
13115       }
13116       /**
13117        * <code>required .MutationProto mutation = 2;</code>
13118        */
getMutationBuilder()13119       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
13120         bitField0_ |= 0x00000002;
13121         onChanged();
13122         return getMutationFieldBuilder().getBuilder();
13123       }
13124       /**
13125        * <code>required .MutationProto mutation = 2;</code>
13126        */
getMutationOrBuilder()13127       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
13128         if (mutationBuilder_ != null) {
13129           return mutationBuilder_.getMessageOrBuilder();
13130         } else {
13131           return mutation_;
13132         }
13133       }
13134       /**
13135        * <code>required .MutationProto mutation = 2;</code>
13136        */
13137       private com.google.protobuf.SingleFieldBuilder<
13138           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationFieldBuilder()13139           getMutationFieldBuilder() {
13140         if (mutationBuilder_ == null) {
13141           mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13142               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
13143                   mutation_,
13144                   getParentForChildren(),
13145                   isClean());
13146           mutation_ = null;
13147         }
13148         return mutationBuilder_;
13149       }
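      // Editorial note (illustrative): two equivalent ways to populate the required mutation,
      // assuming the caller holds a MutationProto value or wants to edit one in place:
      //
      //   builder.setMutation(mutationProto);                     // install a finished message
      //   builder.getMutationBuilder().mergeFrom(mutationProto);  // or work through the nested builder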
13150 
13151       // optional .Condition condition = 3;
13152       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
13153       private com.google.protobuf.SingleFieldBuilder<
13154           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
13155       /**
13156        * <code>optional .Condition condition = 3;</code>
13157        */
hasCondition()13158       public boolean hasCondition() {
13159         return ((bitField0_ & 0x00000004) == 0x00000004);
13160       }
13161       /**
13162        * <code>optional .Condition condition = 3;</code>
13163        */
getCondition()13164       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
13165         if (conditionBuilder_ == null) {
13166           return condition_;
13167         } else {
13168           return conditionBuilder_.getMessage();
13169         }
13170       }
13171       /**
13172        * <code>optional .Condition condition = 3;</code>
13173        */
13174       public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
13175         if (conditionBuilder_ == null) {
13176           if (value == null) {
13177             throw new NullPointerException();
13178           }
13179           condition_ = value;
13180           onChanged();
13181         } else {
13182           conditionBuilder_.setMessage(value);
13183         }
13184         bitField0_ |= 0x00000004;
13185         return this;
13186       }
13187       /**
13188        * <code>optional .Condition condition = 3;</code>
13189        */
13190       public Builder setCondition(
13191           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
13192         if (conditionBuilder_ == null) {
13193           condition_ = builderForValue.build();
13194           onChanged();
13195         } else {
13196           conditionBuilder_.setMessage(builderForValue.build());
13197         }
13198         bitField0_ |= 0x00000004;
13199         return this;
13200       }
13201       /**
13202        * <code>optional .Condition condition = 3;</code>
13203        */
13204       public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
13205         if (conditionBuilder_ == null) {
13206           if (((bitField0_ & 0x00000004) == 0x00000004) &&
13207               condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
13208             condition_ =
13209               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
13210           } else {
13211             condition_ = value;
13212           }
13213           onChanged();
13214         } else {
13215           conditionBuilder_.mergeFrom(value);
13216         }
13217         bitField0_ |= 0x00000004;
13218         return this;
13219       }
13220       /**
13221        * <code>optional .Condition condition = 3;</code>
13222        */
clearCondition()13223       public Builder clearCondition() {
13224         if (conditionBuilder_ == null) {
13225           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
13226           onChanged();
13227         } else {
13228           conditionBuilder_.clear();
13229         }
13230         bitField0_ = (bitField0_ & ~0x00000004);
13231         return this;
13232       }
13233       /**
13234        * <code>optional .Condition condition = 3;</code>
13235        */
getConditionBuilder()13236       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
13237         bitField0_ |= 0x00000004;
13238         onChanged();
13239         return getConditionFieldBuilder().getBuilder();
13240       }
13241       /**
13242        * <code>optional .Condition condition = 3;</code>
13243        */
getConditionOrBuilder()13244       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
13245         if (conditionBuilder_ != null) {
13246           return conditionBuilder_.getMessageOrBuilder();
13247         } else {
13248           return condition_;
13249         }
13250       }
13251       /**
13252        * <code>optional .Condition condition = 3;</code>
13253        */
13254       private com.google.protobuf.SingleFieldBuilder<
13255           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>
getConditionFieldBuilder()13256           getConditionFieldBuilder() {
13257         if (conditionBuilder_ == null) {
13258           conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13259               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
13260                   condition_,
13261                   getParentForChildren(),
13262                   isClean());
13263           condition_ = null;
13264         }
13265         return conditionBuilder_;
13266       }
13267 
13268       // optional uint64 nonce_group = 4;
13269       private long nonceGroup_ ;
13270       /**
13271        * <code>optional uint64 nonce_group = 4;</code>
13272        */
hasNonceGroup()13273       public boolean hasNonceGroup() {
13274         return ((bitField0_ & 0x00000008) == 0x00000008);
13275       }
13276       /**
13277        * <code>optional uint64 nonce_group = 4;</code>
13278        */
getNonceGroup()13279       public long getNonceGroup() {
13280         return nonceGroup_;
13281       }
13282       /**
13283        * <code>optional uint64 nonce_group = 4;</code>
13284        */
setNonceGroup(long value)13285       public Builder setNonceGroup(long value) {
13286         bitField0_ |= 0x00000008;
13287         nonceGroup_ = value;
13288         onChanged();
13289         return this;
13290       }
13291       /**
13292        * <code>optional uint64 nonce_group = 4;</code>
13293        */
clearNonceGroup()13294       public Builder clearNonceGroup() {
13295         bitField0_ = (bitField0_ & ~0x00000008);
13296         nonceGroup_ = 0L;
13297         onChanged();
13298         return this;
13299       }
13300 
13301       // @@protoc_insertion_point(builder_scope:MutateRequest)
13302     }
13303 
13304     static {
13305       defaultInstance = new MutateRequest(true);
defaultInstance.initFields()13306       defaultInstance.initFields();
13307     }
13308 
13309     // @@protoc_insertion_point(class_scope:MutateRequest)
13310   }
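  // Editorial note (illustrative): a hedged end-to-end sketch of a conditional mutate, per the
  // MutateRequest javadoc above. The field values are placeholders, and RegionSpecifier,
  // MutationProto and Condition may have required fields of their own to populate first:
  //
  //   MutateRequest request = MutateRequest.newBuilder()
  //       .setRegion(regionSpecifier)     // required: identifies the target region
  //       .setMutation(mutationProto)     // required: the mutation payload
  //       .setCondition(condition)        // optional: apply the mutation only if this holds
  //       .setNonceGroup(nonceGroup)      // optional: client-supplied nonce group
  //       .build();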
13311 
13312   public interface MutateResponseOrBuilder
13313       extends com.google.protobuf.MessageOrBuilder {
13314 
13315     // optional .Result result = 1;
13316     /**
13317      * <code>optional .Result result = 1;</code>
13318      */
hasResult()13319     boolean hasResult();
13320     /**
13321      * <code>optional .Result result = 1;</code>
13322      */
getResult()13323     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
13324     /**
13325      * <code>optional .Result result = 1;</code>
13326      */
getResultOrBuilder()13327     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
13328 
13329     // optional bool processed = 2;
13330     /**
13331      * <code>optional bool processed = 2;</code>
13332      *
13333      * <pre>
13334      * used only by mutate, to indicate whether the mutation was processed
13335      * </pre>
13336      */
hasProcessed()13337     boolean hasProcessed();
13338     /**
13339      * <code>optional bool processed = 2;</code>
13340      *
13341      * <pre>
13342      * used only by mutate, to indicate whether the mutation was processed
13343      * </pre>
13344      */
getProcessed()13345     boolean getProcessed();
13346   }
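  // Editorial note (illustrative): a hedged sketch of consuming the response fields declared
  // in the interface above:
  //
  //   MutateResponse response = ...;
  //   if (response.hasProcessed() && !response.getProcessed()) {
  //     // a conditional mutate whose Condition was not met
  //   }
  //   if (response.hasResult()) {
  //     ClientProtos.Result result = response.getResult();  // present only when the server returns data
  //   }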
13347   /**
13348    * Protobuf type {@code MutateResponse}
13349    */
13350   public static final class MutateResponse extends
13351       com.google.protobuf.GeneratedMessage
13352       implements MutateResponseOrBuilder {
13353     // Use MutateResponse.newBuilder() to construct.
13354     private MutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
13355       super(builder);
13356       this.unknownFields = builder.getUnknownFields();
13357     }
13358     private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
13359 
13360     private static final MutateResponse defaultInstance;
getDefaultInstance()13361     public static MutateResponse getDefaultInstance() {
13362       return defaultInstance;
13363     }
13364 
getDefaultInstanceForType()13365     public MutateResponse getDefaultInstanceForType() {
13366       return defaultInstance;
13367     }
13368 
13369     private final com.google.protobuf.UnknownFieldSet unknownFields;
13370     @java.lang.Override
13371     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()13372         getUnknownFields() {
13373       return this.unknownFields;
13374     }
13375     private MutateResponse(
13376         com.google.protobuf.CodedInputStream input,
13377         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13378         throws com.google.protobuf.InvalidProtocolBufferException {
13379       initFields();
13380       int mutable_bitField0_ = 0;
13381       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
13382           com.google.protobuf.UnknownFieldSet.newBuilder();
13383       try {
13384         boolean done = false;
13385         while (!done) {
13386           int tag = input.readTag();
13387           switch (tag) {
13388             case 0:
13389               done = true;
13390               break;
13391             default: {
13392               if (!parseUnknownField(input, unknownFields,
13393                                      extensionRegistry, tag)) {
13394                 done = true;
13395               }
13396               break;
13397             }
13398             case 10: {
13399               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
13400               if (((bitField0_ & 0x00000001) == 0x00000001)) {
13401                 subBuilder = result_.toBuilder();
13402               }
13403               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
13404               if (subBuilder != null) {
13405                 subBuilder.mergeFrom(result_);
13406                 result_ = subBuilder.buildPartial();
13407               }
13408               bitField0_ |= 0x00000001;
13409               break;
13410             }
13411             case 16: {
13412               bitField0_ |= 0x00000002;
13413               processed_ = input.readBool();
13414               break;
13415             }
13416           }
13417         }
13418       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13419         throw e.setUnfinishedMessage(this);
13420       } catch (java.io.IOException e) {
13421         throw new com.google.protobuf.InvalidProtocolBufferException(
13422             e.getMessage()).setUnfinishedMessage(this);
13423       } finally {
13424         this.unknownFields = unknownFields.build();
13425         makeExtensionsImmutable();
13426       }
13427     }
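    // Editorial note: in the parsing loop above, a protobuf tag is (field_number << 3) | wire_type,
    // so case 10 is field 1 ('result', length-delimited message: 1 << 3 | 2) and case 16 is
    // field 2 ('processed', varint bool: 2 << 3 | 0); tag 0 marks end of input.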
13428     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()13429         getDescriptor() {
13430       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
13431     }
13432 
13433     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()13434         internalGetFieldAccessorTable() {
13435       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable
13436           .ensureFieldAccessorsInitialized(
13437               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
13438     }
13439 
13440     public static com.google.protobuf.Parser<MutateResponse> PARSER =
13441         new com.google.protobuf.AbstractParser<MutateResponse>() {
13442       public MutateResponse parsePartialFrom(
13443           com.google.protobuf.CodedInputStream input,
13444           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13445           throws com.google.protobuf.InvalidProtocolBufferException {
13446         return new MutateResponse(input, extensionRegistry);
13447       }
13448     };
13449 
13450     @java.lang.Override
getParserForType()13451     public com.google.protobuf.Parser<MutateResponse> getParserForType() {
13452       return PARSER;
13453     }
13454 
13455     private int bitField0_;
13456     // optional .Result result = 1;
13457     public static final int RESULT_FIELD_NUMBER = 1;
13458     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
13459     /**
13460      * <code>optional .Result result = 1;</code>
13461      */
hasResult()13462     public boolean hasResult() {
13463       return ((bitField0_ & 0x00000001) == 0x00000001);
13464     }
13465     /**
13466      * <code>optional .Result result = 1;</code>
13467      */
getResult()13468     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
13469       return result_;
13470     }
13471     /**
13472      * <code>optional .Result result = 1;</code>
13473      */
getResultOrBuilder()13474     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
13475       return result_;
13476     }
13477 
13478     // optional bool processed = 2;
13479     public static final int PROCESSED_FIELD_NUMBER = 2;
13480     private boolean processed_;
13481     /**
13482      * <code>optional bool processed = 2;</code>
13483      *
13484      * <pre>
13485      * used only by mutate, to indicate whether the mutation was processed
13486      * </pre>
13487      */
hasProcessed()13488     public boolean hasProcessed() {
13489       return ((bitField0_ & 0x00000002) == 0x00000002);
13490     }
13491     /**
13492      * <code>optional bool processed = 2;</code>
13493      *
13494      * <pre>
13495      * used only by mutate, to indicate whether the mutation was processed
13496      * </pre>
13497      */
getProcessed()13498     public boolean getProcessed() {
13499       return processed_;
13500     }
13501 
initFields()13502     private void initFields() {
13503       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13504       processed_ = false;
13505     }
13506     private byte memoizedIsInitialized = -1;
isInitialized()13507     public final boolean isInitialized() {
13508       byte isInitialized = memoizedIsInitialized;
13509       if (isInitialized != -1) return isInitialized == 1;
13510 
13511       memoizedIsInitialized = 1;
13512       return true;
13513     }
13514 
13515     public void writeTo(com.google.protobuf.CodedOutputStream output)
13516                         throws java.io.IOException {
13517       getSerializedSize();
13518       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13519         output.writeMessage(1, result_);
13520       }
13521       if (((bitField0_ & 0x00000002) == 0x00000002)) {
13522         output.writeBool(2, processed_);
13523       }
13524       getUnknownFields().writeTo(output);
13525     }
13526 
13527     private int memoizedSerializedSize = -1;
getSerializedSize()13528     public int getSerializedSize() {
13529       int size = memoizedSerializedSize;
13530       if (size != -1) return size;
13531 
13532       size = 0;
13533       if (((bitField0_ & 0x00000001) == 0x00000001)) {
13534         size += com.google.protobuf.CodedOutputStream
13535           .computeMessageSize(1, result_);
13536       }
13537       if (((bitField0_ & 0x00000002) == 0x00000002)) {
13538         size += com.google.protobuf.CodedOutputStream
13539           .computeBoolSize(2, processed_);
13540       }
13541       size += getUnknownFields().getSerializedSize();
13542       memoizedSerializedSize = size;
13543       return size;
13544     }
13545 
13546     private static final long serialVersionUID = 0L;
13547     @java.lang.Override
writeReplace()13548     protected java.lang.Object writeReplace()
13549         throws java.io.ObjectStreamException {
13550       return super.writeReplace();
13551     }
13552 
13553     @java.lang.Override
13554     public boolean equals(final java.lang.Object obj) {
13555       if (obj == this) {
13556        return true;
13557       }
13558       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) {
13559         return super.equals(obj);
13560       }
13561       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj;
13562 
13563       boolean result = true;
13564       result = result && (hasResult() == other.hasResult());
13565       if (hasResult()) {
13566         result = result && getResult()
13567             .equals(other.getResult());
13568       }
13569       result = result && (hasProcessed() == other.hasProcessed());
13570       if (hasProcessed()) {
13571         result = result && (getProcessed()
13572             == other.getProcessed());
13573       }
13574       result = result &&
13575           getUnknownFields().equals(other.getUnknownFields());
13576       return result;
13577     }
13578 
13579     private int memoizedHashCode = 0;
13580     @java.lang.Override
hashCode()13581     public int hashCode() {
13582       if (memoizedHashCode != 0) {
13583         return memoizedHashCode;
13584       }
13585       int hash = 41;
13586       hash = (19 * hash) + getDescriptorForType().hashCode();
13587       if (hasResult()) {
13588         hash = (37 * hash) + RESULT_FIELD_NUMBER;
13589         hash = (53 * hash) + getResult().hashCode();
13590       }
13591       if (hasProcessed()) {
13592         hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
13593         hash = (53 * hash) + hashBoolean(getProcessed());
13594       }
13595       hash = (29 * hash) + getUnknownFields().hashCode();
13596       memoizedHashCode = hash;
13597       return hash;
13598     }
13599 
13600     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13601         com.google.protobuf.ByteString data)
13602         throws com.google.protobuf.InvalidProtocolBufferException {
13603       return PARSER.parseFrom(data);
13604     }
13605     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13606         com.google.protobuf.ByteString data,
13607         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13608         throws com.google.protobuf.InvalidProtocolBufferException {
13609       return PARSER.parseFrom(data, extensionRegistry);
13610     }
13611     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data)
13612         throws com.google.protobuf.InvalidProtocolBufferException {
13613       return PARSER.parseFrom(data);
13614     }
13615     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13616         byte[] data,
13617         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13618         throws com.google.protobuf.InvalidProtocolBufferException {
13619       return PARSER.parseFrom(data, extensionRegistry);
13620     }
13621     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input)
13622         throws java.io.IOException {
13623       return PARSER.parseFrom(input);
13624     }
13625     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13626         java.io.InputStream input,
13627         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13628         throws java.io.IOException {
13629       return PARSER.parseFrom(input, extensionRegistry);
13630     }
13631     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input)
13632         throws java.io.IOException {
13633       return PARSER.parseDelimitedFrom(input);
13634     }
13635     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(
13636         java.io.InputStream input,
13637         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13638         throws java.io.IOException {
13639       return PARSER.parseDelimitedFrom(input, extensionRegistry);
13640     }
13641     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13642         com.google.protobuf.CodedInputStream input)
13643         throws java.io.IOException {
13644       return PARSER.parseFrom(input);
13645     }
13646     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
13647         com.google.protobuf.CodedInputStream input,
13648         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13649         throws java.io.IOException {
13650       return PARSER.parseFrom(input, extensionRegistry);
13651     }
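    // Editorial note: parseFrom(...) expects a single message occupying the whole input, while
    // parseDelimitedFrom(...) first reads a varint length prefix, so it can be called repeatedly
    // on a stream carrying several MutateResponse messages back to back.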
13652 
13653     public static Builder newBuilder() { return Builder.create(); }
13654     public Builder newBuilderForType() { return newBuilder(); }
13655     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) {
13656       return newBuilder().mergeFrom(prototype);
13657     }
13658     public Builder toBuilder() { return newBuilder(this); }
13659 
13660     @java.lang.Override
13661     protected Builder newBuilderForType(
13662         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13663       Builder builder = new Builder(parent);
13664       return builder;
13665     }
13666     /**
13667      * Protobuf type {@code MutateResponse}
13668      */
13669     public static final class Builder extends
13670         com.google.protobuf.GeneratedMessage.Builder<Builder>
13671        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder {
13672       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()13673           getDescriptor() {
13674         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
13675       }
13676 
13677       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()13678           internalGetFieldAccessorTable() {
13679         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable
13680             .ensureFieldAccessorsInitialized(
13681                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
13682       }
13683 
13684       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder()
Builder()13685       private Builder() {
13686         maybeForceBuilderInitialization();
13687       }
13688 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)13689       private Builder(
13690           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
13691         super(parent);
13692         maybeForceBuilderInitialization();
13693       }
maybeForceBuilderInitialization()13694       private void maybeForceBuilderInitialization() {
13695         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
13696           getResultFieldBuilder();
13697         }
13698       }
create()13699       private static Builder create() {
13700         return new Builder();
13701       }
13702 
clear()13703       public Builder clear() {
13704         super.clear();
13705         if (resultBuilder_ == null) {
13706           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13707         } else {
13708           resultBuilder_.clear();
13709         }
13710         bitField0_ = (bitField0_ & ~0x00000001);
13711         processed_ = false;
13712         bitField0_ = (bitField0_ & ~0x00000002);
13713         return this;
13714       }
13715 
clone()13716       public Builder clone() {
13717         return create().mergeFrom(buildPartial());
13718       }
13719 
13720       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()13721           getDescriptorForType() {
13722         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
13723       }
13724 
getDefaultInstanceForType()13725       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() {
13726         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
13727       }
13728 
build()13729       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() {
13730         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial();
13731         if (!result.isInitialized()) {
13732           throw newUninitializedMessageException(result);
13733         }
13734         return result;
13735       }
13736 
buildPartial()13737       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() {
13738         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this);
13739         int from_bitField0_ = bitField0_;
13740         int to_bitField0_ = 0;
13741         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
13742           to_bitField0_ |= 0x00000001;
13743         }
13744         if (resultBuilder_ == null) {
13745           result.result_ = result_;
13746         } else {
13747           result.result_ = resultBuilder_.build();
13748         }
13749         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
13750           to_bitField0_ |= 0x00000002;
13751         }
13752         result.processed_ = processed_;
13753         result.bitField0_ = to_bitField0_;
13754         onBuilt();
13755         return result;
13756       }
13757 
mergeFrom(com.google.protobuf.Message other)13758       public Builder mergeFrom(com.google.protobuf.Message other) {
13759         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) {
13760           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other);
13761         } else {
13762           super.mergeFrom(other);
13763           return this;
13764         }
13765       }
13766 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other)13767       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) {
13768         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this;
13769         if (other.hasResult()) {
13770           mergeResult(other.getResult());
13771         }
13772         if (other.hasProcessed()) {
13773           setProcessed(other.getProcessed());
13774         }
13775         this.mergeUnknownFields(other.getUnknownFields());
13776         return this;
13777       }
13778 
isInitialized()13779       public final boolean isInitialized() {
13780         return true;
13781       }
13782 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13783       public Builder mergeFrom(
13784           com.google.protobuf.CodedInputStream input,
13785           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
13786           throws java.io.IOException {
13787         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null;
13788         try {
13789           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
13790         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
13791           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage();
13792           throw e;
13793         } finally {
13794           if (parsedMessage != null) {
13795             mergeFrom(parsedMessage);
13796           }
13797         }
13798         return this;
13799       }
13800       private int bitField0_;
13801 
13802       // optional .Result result = 1;
13803       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13804       private com.google.protobuf.SingleFieldBuilder<
13805           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
13806       /**
13807        * <code>optional .Result result = 1;</code>
13808        */
hasResult()13809       public boolean hasResult() {
13810         return ((bitField0_ & 0x00000001) == 0x00000001);
13811       }
13812       /**
13813        * <code>optional .Result result = 1;</code>
13814        */
getResult()13815       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
13816         if (resultBuilder_ == null) {
13817           return result_;
13818         } else {
13819           return resultBuilder_.getMessage();
13820         }
13821       }
13822       /**
13823        * <code>optional .Result result = 1;</code>
13824        */
setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value)13825       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
13826         if (resultBuilder_ == null) {
13827           if (value == null) {
13828             throw new NullPointerException();
13829           }
13830           result_ = value;
13831           onChanged();
13832         } else {
13833           resultBuilder_.setMessage(value);
13834         }
13835         bitField0_ |= 0x00000001;
13836         return this;
13837       }
13838       /**
13839        * <code>optional .Result result = 1;</code>
13840        */
setResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue)13841       public Builder setResult(
13842           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
13843         if (resultBuilder_ == null) {
13844           result_ = builderForValue.build();
13845           onChanged();
13846         } else {
13847           resultBuilder_.setMessage(builderForValue.build());
13848         }
13849         bitField0_ |= 0x00000001;
13850         return this;
13851       }
13852       /**
13853        * <code>optional .Result result = 1;</code>
13854        */
mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value)13855       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
13856         if (resultBuilder_ == null) {
13857           if (((bitField0_ & 0x00000001) == 0x00000001) &&
13858               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
13859             result_ =
13860               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
13861           } else {
13862             result_ = value;
13863           }
13864           onChanged();
13865         } else {
13866           resultBuilder_.mergeFrom(value);
13867         }
13868         bitField0_ |= 0x00000001;
13869         return this;
13870       }
13871       /**
13872        * <code>optional .Result result = 1;</code>
13873        */
clearResult()13874       public Builder clearResult() {
13875         if (resultBuilder_ == null) {
13876           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
13877           onChanged();
13878         } else {
13879           resultBuilder_.clear();
13880         }
13881         bitField0_ = (bitField0_ & ~0x00000001);
13882         return this;
13883       }
13884       /**
13885        * <code>optional .Result result = 1;</code>
13886        */
getResultBuilder()13887       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
13888         bitField0_ |= 0x00000001;
13889         onChanged();
13890         return getResultFieldBuilder().getBuilder();
13891       }
13892       /**
13893        * <code>optional .Result result = 1;</code>
13894        */
getResultOrBuilder()13895       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
13896         if (resultBuilder_ != null) {
13897           return resultBuilder_.getMessageOrBuilder();
13898         } else {
13899           return result_;
13900         }
13901       }
13902       /**
13903        * <code>optional .Result result = 1;</code>
13904        */
13905       private com.google.protobuf.SingleFieldBuilder<
13906           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultFieldBuilder()13907           getResultFieldBuilder() {
13908         if (resultBuilder_ == null) {
13909           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
13910               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
13911                   result_,
13912                   getParentForChildren(),
13913                   isClean());
13914           result_ = null;
13915         }
13916         return resultBuilder_;
13917       }
13918 
13919       // optional bool processed = 2;
13920       private boolean processed_ ;
13921       /**
13922        * <code>optional bool processed = 2;</code>
13923        *
13924        * <pre>
13925        * used for mutate to indicate processed only
13926        * </pre>
13927        */
hasProcessed()13928       public boolean hasProcessed() {
13929         return ((bitField0_ & 0x00000002) == 0x00000002);
13930       }
13931       /**
13932        * <code>optional bool processed = 2;</code>
13933        *
13934        * <pre>
13935        * used for mutate to indicate processed only
13936        * </pre>
13937        */
getProcessed()13938       public boolean getProcessed() {
13939         return processed_;
13940       }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       */
      public Builder setProcessed(boolean value) {
        bitField0_ |= 0x00000002;
        processed_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       */
      public Builder clearProcessed() {
        bitField0_ = (bitField0_ & ~0x00000002);
        processed_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:MutateResponse)
    }

    static {
      defaultInstance = new MutateResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:MutateResponse)
  }

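  // Usage sketch for the MutateResponse message defined above. Illustrative only, not
  // part of the generated API; the stream and variable names (in, resp, returned) are
  // made up. A caller holding the serialized RPC payload can parse it with one of the
  // static parseFrom() overloads and read the optional fields through the generated
  // accessors, checking the has*() methods before trusting a value:
  //
  //   java.io.InputStream in = ...;  // serialized MutateResponse bytes, source elided
  //   MutateResponse resp = MutateResponse.parseFrom(in);
  //   if (resp.hasProcessed() && resp.getProcessed()) {
  //     // per the field comment, the mutate was processed
  //   }
  //   if (resp.hasResult()) {
  //     Result returned = resp.getResult();  // optional Result attached by the server
  //   }
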
13979   public interface ScanOrBuilder
13980       extends com.google.protobuf.MessageOrBuilder {
13981 
13982     // repeated .Column column = 1;
13983     /**
13984      * <code>repeated .Column column = 1;</code>
13985      */
13986     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>
getColumnList()13987         getColumnList();
13988     /**
13989      * <code>repeated .Column column = 1;</code>
13990      */
getColumn(int index)13991     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
13992     /**
13993      * <code>repeated .Column column = 1;</code>
13994      */
getColumnCount()13995     int getColumnCount();
13996     /**
13997      * <code>repeated .Column column = 1;</code>
13998      */
13999     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList()14000         getColumnOrBuilderList();
14001     /**
14002      * <code>repeated .Column column = 1;</code>
14003      */
getColumnOrBuilder( int index)14004     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
14005         int index);
14006 
14007     // repeated .NameBytesPair attribute = 2;
14008     /**
14009      * <code>repeated .NameBytesPair attribute = 2;</code>
14010      */
14011     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
getAttributeList()14012         getAttributeList();
14013     /**
14014      * <code>repeated .NameBytesPair attribute = 2;</code>
14015      */
getAttribute(int index)14016     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
14017     /**
14018      * <code>repeated .NameBytesPair attribute = 2;</code>
14019      */
getAttributeCount()14020     int getAttributeCount();
14021     /**
14022      * <code>repeated .NameBytesPair attribute = 2;</code>
14023      */
14024     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList()14025         getAttributeOrBuilderList();
14026     /**
14027      * <code>repeated .NameBytesPair attribute = 2;</code>
14028      */
getAttributeOrBuilder( int index)14029     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
14030         int index);
14031 
14032     // optional bytes start_row = 3;
14033     /**
14034      * <code>optional bytes start_row = 3;</code>
14035      */
hasStartRow()14036     boolean hasStartRow();
14037     /**
14038      * <code>optional bytes start_row = 3;</code>
14039      */
getStartRow()14040     com.google.protobuf.ByteString getStartRow();
14041 
14042     // optional bytes stop_row = 4;
14043     /**
14044      * <code>optional bytes stop_row = 4;</code>
14045      */
hasStopRow()14046     boolean hasStopRow();
14047     /**
14048      * <code>optional bytes stop_row = 4;</code>
14049      */
getStopRow()14050     com.google.protobuf.ByteString getStopRow();
14051 
14052     // optional .Filter filter = 5;
14053     /**
14054      * <code>optional .Filter filter = 5;</code>
14055      */
hasFilter()14056     boolean hasFilter();
14057     /**
14058      * <code>optional .Filter filter = 5;</code>
14059      */
getFilter()14060     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
14061     /**
14062      * <code>optional .Filter filter = 5;</code>
14063      */
getFilterOrBuilder()14064     org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();
14065 
14066     // optional .TimeRange time_range = 6;
14067     /**
14068      * <code>optional .TimeRange time_range = 6;</code>
14069      */
hasTimeRange()14070     boolean hasTimeRange();
14071     /**
14072      * <code>optional .TimeRange time_range = 6;</code>
14073      */
getTimeRange()14074     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
14075     /**
14076      * <code>optional .TimeRange time_range = 6;</code>
14077      */
getTimeRangeOrBuilder()14078     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();
14079 
14080     // optional uint32 max_versions = 7 [default = 1];
14081     /**
14082      * <code>optional uint32 max_versions = 7 [default = 1];</code>
14083      */
hasMaxVersions()14084     boolean hasMaxVersions();
14085     /**
14086      * <code>optional uint32 max_versions = 7 [default = 1];</code>
14087      */
getMaxVersions()14088     int getMaxVersions();
14089 
14090     // optional bool cache_blocks = 8 [default = true];
14091     /**
14092      * <code>optional bool cache_blocks = 8 [default = true];</code>
14093      */
hasCacheBlocks()14094     boolean hasCacheBlocks();
14095     /**
14096      * <code>optional bool cache_blocks = 8 [default = true];</code>
14097      */
getCacheBlocks()14098     boolean getCacheBlocks();
14099 
14100     // optional uint32 batch_size = 9;
14101     /**
14102      * <code>optional uint32 batch_size = 9;</code>
14103      */
hasBatchSize()14104     boolean hasBatchSize();
14105     /**
14106      * <code>optional uint32 batch_size = 9;</code>
14107      */
getBatchSize()14108     int getBatchSize();
14109 
14110     // optional uint64 max_result_size = 10;
14111     /**
14112      * <code>optional uint64 max_result_size = 10;</code>
14113      */
hasMaxResultSize()14114     boolean hasMaxResultSize();
14115     /**
14116      * <code>optional uint64 max_result_size = 10;</code>
14117      */
getMaxResultSize()14118     long getMaxResultSize();
14119 
14120     // optional uint32 store_limit = 11;
14121     /**
14122      * <code>optional uint32 store_limit = 11;</code>
14123      */
hasStoreLimit()14124     boolean hasStoreLimit();
14125     /**
14126      * <code>optional uint32 store_limit = 11;</code>
14127      */
getStoreLimit()14128     int getStoreLimit();
14129 
14130     // optional uint32 store_offset = 12;
14131     /**
14132      * <code>optional uint32 store_offset = 12;</code>
14133      */
hasStoreOffset()14134     boolean hasStoreOffset();
14135     /**
14136      * <code>optional uint32 store_offset = 12;</code>
14137      */
getStoreOffset()14138     int getStoreOffset();
14139 
14140     // optional bool load_column_families_on_demand = 13;
14141     /**
14142      * <code>optional bool load_column_families_on_demand = 13;</code>
14143      *
14144      * <pre>
14145      * DO NOT add defaults to load_column_families_on_demand.
14146      * </pre>
14147      */
hasLoadColumnFamiliesOnDemand()14148     boolean hasLoadColumnFamiliesOnDemand();
14149     /**
14150      * <code>optional bool load_column_families_on_demand = 13;</code>
14151      *
14152      * <pre>
14153      * DO NOT add defaults to load_column_families_on_demand.
14154      * </pre>
14155      */
getLoadColumnFamiliesOnDemand()14156     boolean getLoadColumnFamiliesOnDemand();
14157 
14158     // optional bool small = 14;
14159     /**
14160      * <code>optional bool small = 14;</code>
14161      */
hasSmall()14162     boolean hasSmall();
14163     /**
14164      * <code>optional bool small = 14;</code>
14165      */
getSmall()14166     boolean getSmall();
14167 
14168     // optional bool reversed = 15 [default = false];
14169     /**
14170      * <code>optional bool reversed = 15 [default = false];</code>
14171      */
hasReversed()14172     boolean hasReversed();
14173     /**
14174      * <code>optional bool reversed = 15 [default = false];</code>
14175      */
getReversed()14176     boolean getReversed();
14177 
14178     // optional .Consistency consistency = 16 [default = STRONG];
14179     /**
14180      * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
14181      */
hasConsistency()14182     boolean hasConsistency();
14183     /**
14184      * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
14185      */
getConsistency()14186     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();
14187 
14188     // optional uint32 caching = 17;
14189     /**
14190      * <code>optional uint32 caching = 17;</code>
14191      */
hasCaching()14192     boolean hasCaching();
14193     /**
14194      * <code>optional uint32 caching = 17;</code>
14195      */
getCaching()14196     int getCaching();
14197 
14198     // optional bool allow_partial_results = 18;
14199     /**
14200      * <code>optional bool allow_partial_results = 18;</code>
14201      */
hasAllowPartialResults()14202     boolean hasAllowPartialResults();
14203     /**
14204      * <code>optional bool allow_partial_results = 18;</code>
14205      */
getAllowPartialResults()14206     boolean getAllowPartialResults();
14207 
14208     // repeated .ColumnFamilyTimeRange cf_time_range = 19;
14209     /**
14210      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14211      */
14212     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>
getCfTimeRangeList()14213         getCfTimeRangeList();
14214     /**
14215      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14216      */
getCfTimeRange(int index)14217     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index);
14218     /**
14219      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14220      */
getCfTimeRangeCount()14221     int getCfTimeRangeCount();
14222     /**
14223      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14224      */
14225     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList()14226         getCfTimeRangeOrBuilderList();
14227     /**
14228      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14229      */
getCfTimeRangeOrBuilder( int index)14230     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
14231         int index);
14232   }
  /**
   * Protobuf type {@code Scan}
   *
   * <pre>
   **
   * Instead of a get on a table, you can scan it with optional filters.
   * You can specify the row key range, time range, the columns/families
   * to scan, and so on.
   *
   * This Scan is sent only in the first scan request. The response to the
   * initial scan returns a scanner id, which should be used to fetch
   * result batches later on, until the scanner is closed.
   * </pre>
   */
  public static final class Scan extends
      com.google.protobuf.GeneratedMessage
      implements ScanOrBuilder {
    // Use Scan.newBuilder() to construct.
    private Scan(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Scan defaultInstance;
    public static Scan getDefaultInstance() {
      return defaultInstance;
    }

    public Scan getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
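    // Illustrative sketch only, not generated code: per the class comment above, a Scan
    // is sent once in the first scan request and the returned scanner id is then used to
    // fetch further result batches until the scanner is closed. The Scan message itself
    // is assembled through the generated Builder; the row keys and numbers below are
    // made-up example values.
    //
    //   Scan scan = Scan.newBuilder()
    //       .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-0000"))
    //       .setStopRow(com.google.protobuf.ByteString.copyFromUtf8("row-9999"))
    //       .setMaxVersions(1)
    //       .setCacheBlocks(true)
    //       .setCaching(100)
    //       .setConsistency(Consistency.TIMELINE)
    //       .build();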
Scan( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)14272     private Scan(
14273         com.google.protobuf.CodedInputStream input,
14274         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
14275         throws com.google.protobuf.InvalidProtocolBufferException {
14276       initFields();
14277       int mutable_bitField0_ = 0;
14278       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
14279           com.google.protobuf.UnknownFieldSet.newBuilder();
14280       try {
14281         boolean done = false;
14282         while (!done) {
14283           int tag = input.readTag();
14284           switch (tag) {
14285             case 0:
14286               done = true;
14287               break;
14288             default: {
14289               if (!parseUnknownField(input, unknownFields,
14290                                      extensionRegistry, tag)) {
14291                 done = true;
14292               }
14293               break;
14294             }
14295             case 10: {
14296               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
14297                 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
14298                 mutable_bitField0_ |= 0x00000001;
14299               }
14300               column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
14301               break;
14302             }
14303             case 18: {
14304               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
14305                 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
14306                 mutable_bitField0_ |= 0x00000002;
14307               }
14308               attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
14309               break;
14310             }
14311             case 26: {
14312               bitField0_ |= 0x00000001;
14313               startRow_ = input.readBytes();
14314               break;
14315             }
14316             case 34: {
14317               bitField0_ |= 0x00000002;
14318               stopRow_ = input.readBytes();
14319               break;
14320             }
14321             case 42: {
14322               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
14323               if (((bitField0_ & 0x00000004) == 0x00000004)) {
14324                 subBuilder = filter_.toBuilder();
14325               }
14326               filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
14327               if (subBuilder != null) {
14328                 subBuilder.mergeFrom(filter_);
14329                 filter_ = subBuilder.buildPartial();
14330               }
14331               bitField0_ |= 0x00000004;
14332               break;
14333             }
14334             case 50: {
14335               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
14336               if (((bitField0_ & 0x00000008) == 0x00000008)) {
14337                 subBuilder = timeRange_.toBuilder();
14338               }
14339               timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
14340               if (subBuilder != null) {
14341                 subBuilder.mergeFrom(timeRange_);
14342                 timeRange_ = subBuilder.buildPartial();
14343               }
14344               bitField0_ |= 0x00000008;
14345               break;
14346             }
14347             case 56: {
14348               bitField0_ |= 0x00000010;
14349               maxVersions_ = input.readUInt32();
14350               break;
14351             }
14352             case 64: {
14353               bitField0_ |= 0x00000020;
14354               cacheBlocks_ = input.readBool();
14355               break;
14356             }
14357             case 72: {
14358               bitField0_ |= 0x00000040;
14359               batchSize_ = input.readUInt32();
14360               break;
14361             }
14362             case 80: {
14363               bitField0_ |= 0x00000080;
14364               maxResultSize_ = input.readUInt64();
14365               break;
14366             }
14367             case 88: {
14368               bitField0_ |= 0x00000100;
14369               storeLimit_ = input.readUInt32();
14370               break;
14371             }
14372             case 96: {
14373               bitField0_ |= 0x00000200;
14374               storeOffset_ = input.readUInt32();
14375               break;
14376             }
14377             case 104: {
14378               bitField0_ |= 0x00000400;
14379               loadColumnFamiliesOnDemand_ = input.readBool();
14380               break;
14381             }
14382             case 112: {
14383               bitField0_ |= 0x00000800;
14384               small_ = input.readBool();
14385               break;
14386             }
14387             case 120: {
14388               bitField0_ |= 0x00001000;
14389               reversed_ = input.readBool();
14390               break;
14391             }
14392             case 128: {
14393               int rawValue = input.readEnum();
14394               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
14395               if (value == null) {
14396                 unknownFields.mergeVarintField(16, rawValue);
14397               } else {
14398                 bitField0_ |= 0x00002000;
14399                 consistency_ = value;
14400               }
14401               break;
14402             }
14403             case 136: {
14404               bitField0_ |= 0x00004000;
14405               caching_ = input.readUInt32();
14406               break;
14407             }
14408             case 144: {
14409               bitField0_ |= 0x00008000;
14410               allowPartialResults_ = input.readBool();
14411               break;
14412             }
14413             case 154: {
14414               if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
14415                 cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>();
14416                 mutable_bitField0_ |= 0x00040000;
14417               }
14418               cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry));
14419               break;
14420             }
14421           }
14422         }
14423       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
14424         throw e.setUnfinishedMessage(this);
14425       } catch (java.io.IOException e) {
14426         throw new com.google.protobuf.InvalidProtocolBufferException(
14427             e.getMessage()).setUnfinishedMessage(this);
14428       } finally {
14429         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
14430           column_ = java.util.Collections.unmodifiableList(column_);
14431         }
14432         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
14433           attribute_ = java.util.Collections.unmodifiableList(attribute_);
14434         }
14435         if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
14436           cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
14437         }
14438         this.unknownFields = unknownFields.build();
14439         makeExtensionsImmutable();
14440       }
14441     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
    }

    public static com.google.protobuf.Parser<Scan> PARSER =
        new com.google.protobuf.AbstractParser<Scan>() {
      public Scan parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Scan(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Scan> getParserForType() {
      return PARSER;
    }

14469     private int bitField0_;
14470     // repeated .Column column = 1;
14471     public static final int COLUMN_FIELD_NUMBER = 1;
14472     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
14473     /**
14474      * <code>repeated .Column column = 1;</code>
14475      */
getColumnList()14476     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
14477       return column_;
14478     }
14479     /**
14480      * <code>repeated .Column column = 1;</code>
14481      */
14482     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
getColumnOrBuilderList()14483         getColumnOrBuilderList() {
14484       return column_;
14485     }
14486     /**
14487      * <code>repeated .Column column = 1;</code>
14488      */
getColumnCount()14489     public int getColumnCount() {
14490       return column_.size();
14491     }
14492     /**
14493      * <code>repeated .Column column = 1;</code>
14494      */
getColumn(int index)14495     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
14496       return column_.get(index);
14497     }
14498     /**
14499      * <code>repeated .Column column = 1;</code>
14500      */
getColumnOrBuilder( int index)14501     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
14502         int index) {
14503       return column_.get(index);
14504     }
14505 
14506     // repeated .NameBytesPair attribute = 2;
14507     public static final int ATTRIBUTE_FIELD_NUMBER = 2;
14508     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
14509     /**
14510      * <code>repeated .NameBytesPair attribute = 2;</code>
14511      */
getAttributeList()14512     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
14513       return attribute_;
14514     }
14515     /**
14516      * <code>repeated .NameBytesPair attribute = 2;</code>
14517      */
14518     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getAttributeOrBuilderList()14519         getAttributeOrBuilderList() {
14520       return attribute_;
14521     }
14522     /**
14523      * <code>repeated .NameBytesPair attribute = 2;</code>
14524      */
getAttributeCount()14525     public int getAttributeCount() {
14526       return attribute_.size();
14527     }
14528     /**
14529      * <code>repeated .NameBytesPair attribute = 2;</code>
14530      */
getAttribute(int index)14531     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
14532       return attribute_.get(index);
14533     }
14534     /**
14535      * <code>repeated .NameBytesPair attribute = 2;</code>
14536      */
getAttributeOrBuilder( int index)14537     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
14538         int index) {
14539       return attribute_.get(index);
14540     }
14541 
14542     // optional bytes start_row = 3;
14543     public static final int START_ROW_FIELD_NUMBER = 3;
14544     private com.google.protobuf.ByteString startRow_;
14545     /**
14546      * <code>optional bytes start_row = 3;</code>
14547      */
hasStartRow()14548     public boolean hasStartRow() {
14549       return ((bitField0_ & 0x00000001) == 0x00000001);
14550     }
14551     /**
14552      * <code>optional bytes start_row = 3;</code>
14553      */
getStartRow()14554     public com.google.protobuf.ByteString getStartRow() {
14555       return startRow_;
14556     }
14557 
14558     // optional bytes stop_row = 4;
14559     public static final int STOP_ROW_FIELD_NUMBER = 4;
14560     private com.google.protobuf.ByteString stopRow_;
14561     /**
14562      * <code>optional bytes stop_row = 4;</code>
14563      */
hasStopRow()14564     public boolean hasStopRow() {
14565       return ((bitField0_ & 0x00000002) == 0x00000002);
14566     }
14567     /**
14568      * <code>optional bytes stop_row = 4;</code>
14569      */
getStopRow()14570     public com.google.protobuf.ByteString getStopRow() {
14571       return stopRow_;
14572     }
14573 
14574     // optional .Filter filter = 5;
14575     public static final int FILTER_FIELD_NUMBER = 5;
14576     private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
14577     /**
14578      * <code>optional .Filter filter = 5;</code>
14579      */
hasFilter()14580     public boolean hasFilter() {
14581       return ((bitField0_ & 0x00000004) == 0x00000004);
14582     }
14583     /**
14584      * <code>optional .Filter filter = 5;</code>
14585      */
getFilter()14586     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
14587       return filter_;
14588     }
14589     /**
14590      * <code>optional .Filter filter = 5;</code>
14591      */
getFilterOrBuilder()14592     public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
14593       return filter_;
14594     }
14595 
14596     // optional .TimeRange time_range = 6;
14597     public static final int TIME_RANGE_FIELD_NUMBER = 6;
14598     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
14599     /**
14600      * <code>optional .TimeRange time_range = 6;</code>
14601      */
hasTimeRange()14602     public boolean hasTimeRange() {
14603       return ((bitField0_ & 0x00000008) == 0x00000008);
14604     }
14605     /**
14606      * <code>optional .TimeRange time_range = 6;</code>
14607      */
getTimeRange()14608     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
14609       return timeRange_;
14610     }
14611     /**
14612      * <code>optional .TimeRange time_range = 6;</code>
14613      */
getTimeRangeOrBuilder()14614     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
14615       return timeRange_;
14616     }
14617 
14618     // optional uint32 max_versions = 7 [default = 1];
14619     public static final int MAX_VERSIONS_FIELD_NUMBER = 7;
14620     private int maxVersions_;
14621     /**
14622      * <code>optional uint32 max_versions = 7 [default = 1];</code>
14623      */
hasMaxVersions()14624     public boolean hasMaxVersions() {
14625       return ((bitField0_ & 0x00000010) == 0x00000010);
14626     }
14627     /**
14628      * <code>optional uint32 max_versions = 7 [default = 1];</code>
14629      */
getMaxVersions()14630     public int getMaxVersions() {
14631       return maxVersions_;
14632     }
14633 
14634     // optional bool cache_blocks = 8 [default = true];
14635     public static final int CACHE_BLOCKS_FIELD_NUMBER = 8;
14636     private boolean cacheBlocks_;
14637     /**
14638      * <code>optional bool cache_blocks = 8 [default = true];</code>
14639      */
hasCacheBlocks()14640     public boolean hasCacheBlocks() {
14641       return ((bitField0_ & 0x00000020) == 0x00000020);
14642     }
14643     /**
14644      * <code>optional bool cache_blocks = 8 [default = true];</code>
14645      */
getCacheBlocks()14646     public boolean getCacheBlocks() {
14647       return cacheBlocks_;
14648     }
14649 
14650     // optional uint32 batch_size = 9;
14651     public static final int BATCH_SIZE_FIELD_NUMBER = 9;
14652     private int batchSize_;
14653     /**
14654      * <code>optional uint32 batch_size = 9;</code>
14655      */
hasBatchSize()14656     public boolean hasBatchSize() {
14657       return ((bitField0_ & 0x00000040) == 0x00000040);
14658     }
14659     /**
14660      * <code>optional uint32 batch_size = 9;</code>
14661      */
getBatchSize()14662     public int getBatchSize() {
14663       return batchSize_;
14664     }
14665 
14666     // optional uint64 max_result_size = 10;
14667     public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10;
14668     private long maxResultSize_;
14669     /**
14670      * <code>optional uint64 max_result_size = 10;</code>
14671      */
hasMaxResultSize()14672     public boolean hasMaxResultSize() {
14673       return ((bitField0_ & 0x00000080) == 0x00000080);
14674     }
14675     /**
14676      * <code>optional uint64 max_result_size = 10;</code>
14677      */
getMaxResultSize()14678     public long getMaxResultSize() {
14679       return maxResultSize_;
14680     }
14681 
14682     // optional uint32 store_limit = 11;
14683     public static final int STORE_LIMIT_FIELD_NUMBER = 11;
14684     private int storeLimit_;
14685     /**
14686      * <code>optional uint32 store_limit = 11;</code>
14687      */
hasStoreLimit()14688     public boolean hasStoreLimit() {
14689       return ((bitField0_ & 0x00000100) == 0x00000100);
14690     }
14691     /**
14692      * <code>optional uint32 store_limit = 11;</code>
14693      */
getStoreLimit()14694     public int getStoreLimit() {
14695       return storeLimit_;
14696     }
14697 
14698     // optional uint32 store_offset = 12;
14699     public static final int STORE_OFFSET_FIELD_NUMBER = 12;
14700     private int storeOffset_;
14701     /**
14702      * <code>optional uint32 store_offset = 12;</code>
14703      */
hasStoreOffset()14704     public boolean hasStoreOffset() {
14705       return ((bitField0_ & 0x00000200) == 0x00000200);
14706     }
14707     /**
14708      * <code>optional uint32 store_offset = 12;</code>
14709      */
getStoreOffset()14710     public int getStoreOffset() {
14711       return storeOffset_;
14712     }
14713 
14714     // optional bool load_column_families_on_demand = 13;
14715     public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13;
14716     private boolean loadColumnFamiliesOnDemand_;
14717     /**
14718      * <code>optional bool load_column_families_on_demand = 13;</code>
14719      *
14720      * <pre>
14721      * DO NOT add defaults to load_column_families_on_demand.
14722      * </pre>
14723      */
hasLoadColumnFamiliesOnDemand()14724     public boolean hasLoadColumnFamiliesOnDemand() {
14725       return ((bitField0_ & 0x00000400) == 0x00000400);
14726     }
14727     /**
14728      * <code>optional bool load_column_families_on_demand = 13;</code>
14729      *
14730      * <pre>
14731      * DO NOT add defaults to load_column_families_on_demand.
14732      * </pre>
14733      */
getLoadColumnFamiliesOnDemand()14734     public boolean getLoadColumnFamiliesOnDemand() {
14735       return loadColumnFamiliesOnDemand_;
14736     }
14737 
14738     // optional bool small = 14;
14739     public static final int SMALL_FIELD_NUMBER = 14;
14740     private boolean small_;
14741     /**
14742      * <code>optional bool small = 14;</code>
14743      */
hasSmall()14744     public boolean hasSmall() {
14745       return ((bitField0_ & 0x00000800) == 0x00000800);
14746     }
14747     /**
14748      * <code>optional bool small = 14;</code>
14749      */
getSmall()14750     public boolean getSmall() {
14751       return small_;
14752     }
14753 
14754     // optional bool reversed = 15 [default = false];
14755     public static final int REVERSED_FIELD_NUMBER = 15;
14756     private boolean reversed_;
14757     /**
14758      * <code>optional bool reversed = 15 [default = false];</code>
14759      */
hasReversed()14760     public boolean hasReversed() {
14761       return ((bitField0_ & 0x00001000) == 0x00001000);
14762     }
14763     /**
14764      * <code>optional bool reversed = 15 [default = false];</code>
14765      */
getReversed()14766     public boolean getReversed() {
14767       return reversed_;
14768     }
14769 
14770     // optional .Consistency consistency = 16 [default = STRONG];
14771     public static final int CONSISTENCY_FIELD_NUMBER = 16;
14772     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_;
14773     /**
14774      * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
14775      */
hasConsistency()14776     public boolean hasConsistency() {
14777       return ((bitField0_ & 0x00002000) == 0x00002000);
14778     }
14779     /**
14780      * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
14781      */
getConsistency()14782     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
14783       return consistency_;
14784     }
14785 
14786     // optional uint32 caching = 17;
14787     public static final int CACHING_FIELD_NUMBER = 17;
14788     private int caching_;
14789     /**
14790      * <code>optional uint32 caching = 17;</code>
14791      */
hasCaching()14792     public boolean hasCaching() {
14793       return ((bitField0_ & 0x00004000) == 0x00004000);
14794     }
14795     /**
14796      * <code>optional uint32 caching = 17;</code>
14797      */
getCaching()14798     public int getCaching() {
14799       return caching_;
14800     }
14801 
14802     // optional bool allow_partial_results = 18;
14803     public static final int ALLOW_PARTIAL_RESULTS_FIELD_NUMBER = 18;
14804     private boolean allowPartialResults_;
14805     /**
14806      * <code>optional bool allow_partial_results = 18;</code>
14807      */
hasAllowPartialResults()14808     public boolean hasAllowPartialResults() {
14809       return ((bitField0_ & 0x00008000) == 0x00008000);
14810     }
14811     /**
14812      * <code>optional bool allow_partial_results = 18;</code>
14813      */
getAllowPartialResults()14814     public boolean getAllowPartialResults() {
14815       return allowPartialResults_;
14816     }
14817 
14818     // repeated .ColumnFamilyTimeRange cf_time_range = 19;
14819     public static final int CF_TIME_RANGE_FIELD_NUMBER = 19;
14820     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_;
14821     /**
14822      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14823      */
getCfTimeRangeList()14824     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
14825       return cfTimeRange_;
14826     }
14827     /**
14828      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14829      */
14830     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
getCfTimeRangeOrBuilderList()14831         getCfTimeRangeOrBuilderList() {
14832       return cfTimeRange_;
14833     }
14834     /**
14835      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14836      */
getCfTimeRangeCount()14837     public int getCfTimeRangeCount() {
14838       return cfTimeRange_.size();
14839     }
14840     /**
14841      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14842      */
getCfTimeRange(int index)14843     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
14844       return cfTimeRange_.get(index);
14845     }
14846     /**
14847      * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
14848      */
getCfTimeRangeOrBuilder( int index)14849     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
14850         int index) {
14851       return cfTimeRange_.get(index);
14852     }
14853 
    private void initFields() {
      column_ = java.util.Collections.emptyList();
      attribute_ = java.util.Collections.emptyList();
      startRow_ = com.google.protobuf.ByteString.EMPTY;
      stopRow_ = com.google.protobuf.ByteString.EMPTY;
      filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      maxVersions_ = 1;
      cacheBlocks_ = true;
      batchSize_ = 0;
      maxResultSize_ = 0L;
      storeLimit_ = 0;
      storeOffset_ = 0;
      loadColumnFamiliesOnDemand_ = false;
      small_ = false;
      reversed_ = false;
      consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
      caching_ = 0;
      allowPartialResults_ = false;
      cfTimeRange_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getColumnCount(); i++) {
        if (!getColumn(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAttributeCount(); i++) {
        if (!getAttribute(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasFilter()) {
        if (!getFilter().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getCfTimeRangeCount(); i++) {
        if (!getCfTimeRange(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

writeTo(com.google.protobuf.CodedOutputStream output)14908     public void writeTo(com.google.protobuf.CodedOutputStream output)
14909                         throws java.io.IOException {
14910       getSerializedSize();
14911       for (int i = 0; i < column_.size(); i++) {
14912         output.writeMessage(1, column_.get(i));
14913       }
14914       for (int i = 0; i < attribute_.size(); i++) {
14915         output.writeMessage(2, attribute_.get(i));
14916       }
14917       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14918         output.writeBytes(3, startRow_);
14919       }
14920       if (((bitField0_ & 0x00000002) == 0x00000002)) {
14921         output.writeBytes(4, stopRow_);
14922       }
14923       if (((bitField0_ & 0x00000004) == 0x00000004)) {
14924         output.writeMessage(5, filter_);
14925       }
14926       if (((bitField0_ & 0x00000008) == 0x00000008)) {
14927         output.writeMessage(6, timeRange_);
14928       }
14929       if (((bitField0_ & 0x00000010) == 0x00000010)) {
14930         output.writeUInt32(7, maxVersions_);
14931       }
14932       if (((bitField0_ & 0x00000020) == 0x00000020)) {
14933         output.writeBool(8, cacheBlocks_);
14934       }
14935       if (((bitField0_ & 0x00000040) == 0x00000040)) {
14936         output.writeUInt32(9, batchSize_);
14937       }
14938       if (((bitField0_ & 0x00000080) == 0x00000080)) {
14939         output.writeUInt64(10, maxResultSize_);
14940       }
14941       if (((bitField0_ & 0x00000100) == 0x00000100)) {
14942         output.writeUInt32(11, storeLimit_);
14943       }
14944       if (((bitField0_ & 0x00000200) == 0x00000200)) {
14945         output.writeUInt32(12, storeOffset_);
14946       }
14947       if (((bitField0_ & 0x00000400) == 0x00000400)) {
14948         output.writeBool(13, loadColumnFamiliesOnDemand_);
14949       }
14950       if (((bitField0_ & 0x00000800) == 0x00000800)) {
14951         output.writeBool(14, small_);
14952       }
14953       if (((bitField0_ & 0x00001000) == 0x00001000)) {
14954         output.writeBool(15, reversed_);
14955       }
14956       if (((bitField0_ & 0x00002000) == 0x00002000)) {
14957         output.writeEnum(16, consistency_.getNumber());
14958       }
14959       if (((bitField0_ & 0x00004000) == 0x00004000)) {
14960         output.writeUInt32(17, caching_);
14961       }
14962       if (((bitField0_ & 0x00008000) == 0x00008000)) {
14963         output.writeBool(18, allowPartialResults_);
14964       }
14965       for (int i = 0; i < cfTimeRange_.size(); i++) {
14966         output.writeMessage(19, cfTimeRange_.get(i));
14967       }
14968       getUnknownFields().writeTo(output);
14969     }
14970 
14971     private int memoizedSerializedSize = -1;
getSerializedSize()14972     public int getSerializedSize() {
14973       int size = memoizedSerializedSize;
14974       if (size != -1) return size;
14975 
14976       size = 0;
14977       for (int i = 0; i < column_.size(); i++) {
14978         size += com.google.protobuf.CodedOutputStream
14979           .computeMessageSize(1, column_.get(i));
14980       }
14981       for (int i = 0; i < attribute_.size(); i++) {
14982         size += com.google.protobuf.CodedOutputStream
14983           .computeMessageSize(2, attribute_.get(i));
14984       }
14985       if (((bitField0_ & 0x00000001) == 0x00000001)) {
14986         size += com.google.protobuf.CodedOutputStream
14987           .computeBytesSize(3, startRow_);
14988       }
14989       if (((bitField0_ & 0x00000002) == 0x00000002)) {
14990         size += com.google.protobuf.CodedOutputStream
14991           .computeBytesSize(4, stopRow_);
14992       }
14993       if (((bitField0_ & 0x00000004) == 0x00000004)) {
14994         size += com.google.protobuf.CodedOutputStream
14995           .computeMessageSize(5, filter_);
14996       }
14997       if (((bitField0_ & 0x00000008) == 0x00000008)) {
14998         size += com.google.protobuf.CodedOutputStream
14999           .computeMessageSize(6, timeRange_);
15000       }
15001       if (((bitField0_ & 0x00000010) == 0x00000010)) {
15002         size += com.google.protobuf.CodedOutputStream
15003           .computeUInt32Size(7, maxVersions_);
15004       }
15005       if (((bitField0_ & 0x00000020) == 0x00000020)) {
15006         size += com.google.protobuf.CodedOutputStream
15007           .computeBoolSize(8, cacheBlocks_);
15008       }
15009       if (((bitField0_ & 0x00000040) == 0x00000040)) {
15010         size += com.google.protobuf.CodedOutputStream
15011           .computeUInt32Size(9, batchSize_);
15012       }
15013       if (((bitField0_ & 0x00000080) == 0x00000080)) {
15014         size += com.google.protobuf.CodedOutputStream
15015           .computeUInt64Size(10, maxResultSize_);
15016       }
15017       if (((bitField0_ & 0x00000100) == 0x00000100)) {
15018         size += com.google.protobuf.CodedOutputStream
15019           .computeUInt32Size(11, storeLimit_);
15020       }
15021       if (((bitField0_ & 0x00000200) == 0x00000200)) {
15022         size += com.google.protobuf.CodedOutputStream
15023           .computeUInt32Size(12, storeOffset_);
15024       }
15025       if (((bitField0_ & 0x00000400) == 0x00000400)) {
15026         size += com.google.protobuf.CodedOutputStream
15027           .computeBoolSize(13, loadColumnFamiliesOnDemand_);
15028       }
15029       if (((bitField0_ & 0x00000800) == 0x00000800)) {
15030         size += com.google.protobuf.CodedOutputStream
15031           .computeBoolSize(14, small_);
15032       }
15033       if (((bitField0_ & 0x00001000) == 0x00001000)) {
15034         size += com.google.protobuf.CodedOutputStream
15035           .computeBoolSize(15, reversed_);
15036       }
15037       if (((bitField0_ & 0x00002000) == 0x00002000)) {
15038         size += com.google.protobuf.CodedOutputStream
15039           .computeEnumSize(16, consistency_.getNumber());
15040       }
15041       if (((bitField0_ & 0x00004000) == 0x00004000)) {
15042         size += com.google.protobuf.CodedOutputStream
15043           .computeUInt32Size(17, caching_);
15044       }
15045       if (((bitField0_ & 0x00008000) == 0x00008000)) {
15046         size += com.google.protobuf.CodedOutputStream
15047           .computeBoolSize(18, allowPartialResults_);
15048       }
15049       for (int i = 0; i < cfTimeRange_.size(); i++) {
15050         size += com.google.protobuf.CodedOutputStream
15051           .computeMessageSize(19, cfTimeRange_.get(i));
15052       }
15053       size += getUnknownFields().getSerializedSize();
15054       memoizedSerializedSize = size;
15055       return size;
15056     }
15057 
15058     private static final long serialVersionUID = 0L;
15059     @java.lang.Override
15060     protected java.lang.Object writeReplace()
15061         throws java.io.ObjectStreamException {
15062       return super.writeReplace();
15063     }
15064 
15065     @java.lang.Override
15066     public boolean equals(final java.lang.Object obj) {
15067       if (obj == this) {
15068        return true;
15069       }
15070       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) {
15071         return super.equals(obj);
15072       }
15073       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj;
15074 
15075       boolean result = true;
15076       result = result && getColumnList()
15077           .equals(other.getColumnList());
15078       result = result && getAttributeList()
15079           .equals(other.getAttributeList());
15080       result = result && (hasStartRow() == other.hasStartRow());
15081       if (hasStartRow()) {
15082         result = result && getStartRow()
15083             .equals(other.getStartRow());
15084       }
15085       result = result && (hasStopRow() == other.hasStopRow());
15086       if (hasStopRow()) {
15087         result = result && getStopRow()
15088             .equals(other.getStopRow());
15089       }
15090       result = result && (hasFilter() == other.hasFilter());
15091       if (hasFilter()) {
15092         result = result && getFilter()
15093             .equals(other.getFilter());
15094       }
15095       result = result && (hasTimeRange() == other.hasTimeRange());
15096       if (hasTimeRange()) {
15097         result = result && getTimeRange()
15098             .equals(other.getTimeRange());
15099       }
15100       result = result && (hasMaxVersions() == other.hasMaxVersions());
15101       if (hasMaxVersions()) {
15102         result = result && (getMaxVersions()
15103             == other.getMaxVersions());
15104       }
15105       result = result && (hasCacheBlocks() == other.hasCacheBlocks());
15106       if (hasCacheBlocks()) {
15107         result = result && (getCacheBlocks()
15108             == other.getCacheBlocks());
15109       }
15110       result = result && (hasBatchSize() == other.hasBatchSize());
15111       if (hasBatchSize()) {
15112         result = result && (getBatchSize()
15113             == other.getBatchSize());
15114       }
15115       result = result && (hasMaxResultSize() == other.hasMaxResultSize());
15116       if (hasMaxResultSize()) {
15117         result = result && (getMaxResultSize()
15118             == other.getMaxResultSize());
15119       }
15120       result = result && (hasStoreLimit() == other.hasStoreLimit());
15121       if (hasStoreLimit()) {
15122         result = result && (getStoreLimit()
15123             == other.getStoreLimit());
15124       }
15125       result = result && (hasStoreOffset() == other.hasStoreOffset());
15126       if (hasStoreOffset()) {
15127         result = result && (getStoreOffset()
15128             == other.getStoreOffset());
15129       }
15130       result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand());
15131       if (hasLoadColumnFamiliesOnDemand()) {
15132         result = result && (getLoadColumnFamiliesOnDemand()
15133             == other.getLoadColumnFamiliesOnDemand());
15134       }
15135       result = result && (hasSmall() == other.hasSmall());
15136       if (hasSmall()) {
15137         result = result && (getSmall()
15138             == other.getSmall());
15139       }
15140       result = result && (hasReversed() == other.hasReversed());
15141       if (hasReversed()) {
15142         result = result && (getReversed()
15143             == other.getReversed());
15144       }
15145       result = result && (hasConsistency() == other.hasConsistency());
15146       if (hasConsistency()) {
15147         result = result &&
15148             (getConsistency() == other.getConsistency());
15149       }
15150       result = result && (hasCaching() == other.hasCaching());
15151       if (hasCaching()) {
15152         result = result && (getCaching()
15153             == other.getCaching());
15154       }
15155       result = result && (hasAllowPartialResults() == other.hasAllowPartialResults());
15156       if (hasAllowPartialResults()) {
15157         result = result && (getAllowPartialResults()
15158             == other.getAllowPartialResults());
15159       }
15160       result = result && getCfTimeRangeList()
15161           .equals(other.getCfTimeRangeList());
15162       result = result &&
15163           getUnknownFields().equals(other.getUnknownFields());
15164       return result;
15165     }
15166 
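    // hashCode() below folds the descriptor and every present field into the
    // hash (the field number with a 37 multiplier, the field value with a 53
    // multiplier) and caches the result in memoizedHashCode, matching the
    // cached-size and equals() logic above.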
15167     private int memoizedHashCode = 0;
15168     @java.lang.Override
15169     public int hashCode() {
15170       if (memoizedHashCode != 0) {
15171         return memoizedHashCode;
15172       }
15173       int hash = 41;
15174       hash = (19 * hash) + getDescriptorForType().hashCode();
15175       if (getColumnCount() > 0) {
15176         hash = (37 * hash) + COLUMN_FIELD_NUMBER;
15177         hash = (53 * hash) + getColumnList().hashCode();
15178       }
15179       if (getAttributeCount() > 0) {
15180         hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
15181         hash = (53 * hash) + getAttributeList().hashCode();
15182       }
15183       if (hasStartRow()) {
15184         hash = (37 * hash) + START_ROW_FIELD_NUMBER;
15185         hash = (53 * hash) + getStartRow().hashCode();
15186       }
15187       if (hasStopRow()) {
15188         hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
15189         hash = (53 * hash) + getStopRow().hashCode();
15190       }
15191       if (hasFilter()) {
15192         hash = (37 * hash) + FILTER_FIELD_NUMBER;
15193         hash = (53 * hash) + getFilter().hashCode();
15194       }
15195       if (hasTimeRange()) {
15196         hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
15197         hash = (53 * hash) + getTimeRange().hashCode();
15198       }
15199       if (hasMaxVersions()) {
15200         hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
15201         hash = (53 * hash) + getMaxVersions();
15202       }
15203       if (hasCacheBlocks()) {
15204         hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
15205         hash = (53 * hash) + hashBoolean(getCacheBlocks());
15206       }
15207       if (hasBatchSize()) {
15208         hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER;
15209         hash = (53 * hash) + getBatchSize();
15210       }
15211       if (hasMaxResultSize()) {
15212         hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER;
15213         hash = (53 * hash) + hashLong(getMaxResultSize());
15214       }
15215       if (hasStoreLimit()) {
15216         hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
15217         hash = (53 * hash) + getStoreLimit();
15218       }
15219       if (hasStoreOffset()) {
15220         hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
15221         hash = (53 * hash) + getStoreOffset();
15222       }
15223       if (hasLoadColumnFamiliesOnDemand()) {
15224         hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER;
15225         hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand());
15226       }
15227       if (hasSmall()) {
15228         hash = (37 * hash) + SMALL_FIELD_NUMBER;
15229         hash = (53 * hash) + hashBoolean(getSmall());
15230       }
15231       if (hasReversed()) {
15232         hash = (37 * hash) + REVERSED_FIELD_NUMBER;
15233         hash = (53 * hash) + hashBoolean(getReversed());
15234       }
15235       if (hasConsistency()) {
15236         hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
15237         hash = (53 * hash) + hashEnum(getConsistency());
15238       }
15239       if (hasCaching()) {
15240         hash = (37 * hash) + CACHING_FIELD_NUMBER;
15241         hash = (53 * hash) + getCaching();
15242       }
15243       if (hasAllowPartialResults()) {
15244         hash = (37 * hash) + ALLOW_PARTIAL_RESULTS_FIELD_NUMBER;
15245         hash = (53 * hash) + hashBoolean(getAllowPartialResults());
15246       }
15247       if (getCfTimeRangeCount() > 0) {
15248         hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER;
15249         hash = (53 * hash) + getCfTimeRangeList().hashCode();
15250       }
15251       hash = (29 * hash) + getUnknownFields().hashCode();
15252       memoizedHashCode = hash;
15253       return hash;
15254     }
15255 
15256     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
15257         com.google.protobuf.ByteString data)
15258         throws com.google.protobuf.InvalidProtocolBufferException {
15259       return PARSER.parseFrom(data);
15260     }
15261     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
15262         com.google.protobuf.ByteString data,
15263         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15264         throws com.google.protobuf.InvalidProtocolBufferException {
15265       return PARSER.parseFrom(data, extensionRegistry);
15266     }
15267     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data)
15268         throws com.google.protobuf.InvalidProtocolBufferException {
15269       return PARSER.parseFrom(data);
15270     }
15271     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
15272         byte[] data,
15273         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15274         throws com.google.protobuf.InvalidProtocolBufferException {
15275       return PARSER.parseFrom(data, extensionRegistry);
15276     }
15277     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input)
15278         throws java.io.IOException {
15279       return PARSER.parseFrom(input);
15280     }
15281     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
15282         java.io.InputStream input,
15283         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15284         throws java.io.IOException {
15285       return PARSER.parseFrom(input, extensionRegistry);
15286     }
15287     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input)
15288         throws java.io.IOException {
15289       return PARSER.parseDelimitedFrom(input);
15290     }
15291     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(
15292         java.io.InputStream input,
15293         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15294         throws java.io.IOException {
15295       return PARSER.parseDelimitedFrom(input, extensionRegistry);
15296     }
15297     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
15298         com.google.protobuf.CodedInputStream input)
15299         throws java.io.IOException {
15300       return PARSER.parseFrom(input);
15301     }
15302     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
15303         com.google.protobuf.CodedInputStream input,
15304         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15305         throws java.io.IOException {
15306       return PARSER.parseFrom(input, extensionRegistry);
15307     }
15308 
15309     public static Builder newBuilder() { return Builder.create(); }
15310     public Builder newBuilderForType() { return newBuilder(); }
15311     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) {
15312       return newBuilder().mergeFrom(prototype);
15313     }
15314     public Builder toBuilder() { return newBuilder(this); }
15315 
15316     @java.lang.Override
15317     protected Builder newBuilderForType(
15318         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15319       Builder builder = new Builder(parent);
15320       return builder;
15321     }
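    /*
     * A minimal usage sketch (not part of the generated code): it builds a Scan
     * with the generated Builder and round-trips it through the wire format.
     * The setters, build(), and parseFrom(...) used here are the ones generated
     * in this class; the row keys and caching value are illustrative
     * assumptions only.
     *
     *   ClientProtos.Scan scan = ClientProtos.Scan.newBuilder()
     *       .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-0000"))  // illustrative start key
     *       .setStopRow(com.google.protobuf.ByteString.copyFromUtf8("row-9999"))   // illustrative stop key
     *       .setMaxVersions(1)
     *       .setCacheBlocks(true)
     *       .setCaching(100)
     *       .build();
     *   com.google.protobuf.ByteString wire = scan.toByteString();
     *   // parseFrom throws InvalidProtocolBufferException on malformed input
     *   ClientProtos.Scan copy = ClientProtos.Scan.parseFrom(wire);
     */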
15322     /**
15323      * Protobuf type {@code Scan}
15324      *
15325      * <pre>
15326      **
15327      * Instead of getting a single row from a table, you can scan it with
15328      * optional filters. You can specify the row key range, the time range,
15329      * the columns/families to scan, and so on.
15330      *
15331      * This Scan is sent with the first scan request. The response to that
15332      * initial request returns a scanner id, which should be used to fetch
15333      * the following result batches until the scanner is closed.
15334      * </pre>
15335      */
15336     public static final class Builder extends
15337         com.google.protobuf.GeneratedMessage.Builder<Builder>
15338        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder {
15339       public static final com.google.protobuf.Descriptors.Descriptor
15340           getDescriptor() {
15341         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
15342       }
15343 
15344       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
15345           internalGetFieldAccessorTable() {
15346         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable
15347             .ensureFieldAccessorsInitialized(
15348                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
15349       }
15350 
15351       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder()
15352       private Builder() {
15353         maybeForceBuilderInitialization();
15354       }
15355 
15356       private Builder(
15357           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
15358         super(parent);
15359         maybeForceBuilderInitialization();
15360       }
15361       private void maybeForceBuilderInitialization() {
15362         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
15363           getColumnFieldBuilder();
15364           getAttributeFieldBuilder();
15365           getFilterFieldBuilder();
15366           getTimeRangeFieldBuilder();
15367           getCfTimeRangeFieldBuilder();
15368         }
15369       }
15370       private static Builder create() {
15371         return new Builder();
15372       }
15373 
15374       public Builder clear() {
15375         super.clear();
15376         if (columnBuilder_ == null) {
15377           column_ = java.util.Collections.emptyList();
15378           bitField0_ = (bitField0_ & ~0x00000001);
15379         } else {
15380           columnBuilder_.clear();
15381         }
15382         if (attributeBuilder_ == null) {
15383           attribute_ = java.util.Collections.emptyList();
15384           bitField0_ = (bitField0_ & ~0x00000002);
15385         } else {
15386           attributeBuilder_.clear();
15387         }
15388         startRow_ = com.google.protobuf.ByteString.EMPTY;
15389         bitField0_ = (bitField0_ & ~0x00000004);
15390         stopRow_ = com.google.protobuf.ByteString.EMPTY;
15391         bitField0_ = (bitField0_ & ~0x00000008);
15392         if (filterBuilder_ == null) {
15393           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
15394         } else {
15395           filterBuilder_.clear();
15396         }
15397         bitField0_ = (bitField0_ & ~0x00000010);
15398         if (timeRangeBuilder_ == null) {
15399           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
15400         } else {
15401           timeRangeBuilder_.clear();
15402         }
15403         bitField0_ = (bitField0_ & ~0x00000020);
15404         maxVersions_ = 1;
15405         bitField0_ = (bitField0_ & ~0x00000040);
15406         cacheBlocks_ = true;
15407         bitField0_ = (bitField0_ & ~0x00000080);
15408         batchSize_ = 0;
15409         bitField0_ = (bitField0_ & ~0x00000100);
15410         maxResultSize_ = 0L;
15411         bitField0_ = (bitField0_ & ~0x00000200);
15412         storeLimit_ = 0;
15413         bitField0_ = (bitField0_ & ~0x00000400);
15414         storeOffset_ = 0;
15415         bitField0_ = (bitField0_ & ~0x00000800);
15416         loadColumnFamiliesOnDemand_ = false;
15417         bitField0_ = (bitField0_ & ~0x00001000);
15418         small_ = false;
15419         bitField0_ = (bitField0_ & ~0x00002000);
15420         reversed_ = false;
15421         bitField0_ = (bitField0_ & ~0x00004000);
15422         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
15423         bitField0_ = (bitField0_ & ~0x00008000);
15424         caching_ = 0;
15425         bitField0_ = (bitField0_ & ~0x00010000);
15426         allowPartialResults_ = false;
15427         bitField0_ = (bitField0_ & ~0x00020000);
15428         if (cfTimeRangeBuilder_ == null) {
15429           cfTimeRange_ = java.util.Collections.emptyList();
15430           bitField0_ = (bitField0_ & ~0x00040000);
15431         } else {
15432           cfTimeRangeBuilder_.clear();
15433         }
15434         return this;
15435       }
15436 
15437       public Builder clone() {
15438         return create().mergeFrom(buildPartial());
15439       }
15440 
15441       public com.google.protobuf.Descriptors.Descriptor
15442           getDescriptorForType() {
15443         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
15444       }
15445 
15446       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() {
15447         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
15448       }
15449 
15450       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() {
15451         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial();
15452         if (!result.isInitialized()) {
15453           throw newUninitializedMessageException(result);
15454         }
15455         return result;
15456       }
15457 
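      // buildPartial() below copies every builder field into a fresh Scan and
      // remaps the builder presence bits (from_bitField0_) onto the message
      // presence bits (to_bitField0_); repeated fields are frozen as
      // unmodifiable lists when no nested field builder is in use.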
15458       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() {
15459         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this);
15460         int from_bitField0_ = bitField0_;
15461         int to_bitField0_ = 0;
15462         if (columnBuilder_ == null) {
15463           if (((bitField0_ & 0x00000001) == 0x00000001)) {
15464             column_ = java.util.Collections.unmodifiableList(column_);
15465             bitField0_ = (bitField0_ & ~0x00000001);
15466           }
15467           result.column_ = column_;
15468         } else {
15469           result.column_ = columnBuilder_.build();
15470         }
15471         if (attributeBuilder_ == null) {
15472           if (((bitField0_ & 0x00000002) == 0x00000002)) {
15473             attribute_ = java.util.Collections.unmodifiableList(attribute_);
15474             bitField0_ = (bitField0_ & ~0x00000002);
15475           }
15476           result.attribute_ = attribute_;
15477         } else {
15478           result.attribute_ = attributeBuilder_.build();
15479         }
15480         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
15481           to_bitField0_ |= 0x00000001;
15482         }
15483         result.startRow_ = startRow_;
15484         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
15485           to_bitField0_ |= 0x00000002;
15486         }
15487         result.stopRow_ = stopRow_;
15488         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
15489           to_bitField0_ |= 0x00000004;
15490         }
15491         if (filterBuilder_ == null) {
15492           result.filter_ = filter_;
15493         } else {
15494           result.filter_ = filterBuilder_.build();
15495         }
15496         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
15497           to_bitField0_ |= 0x00000008;
15498         }
15499         if (timeRangeBuilder_ == null) {
15500           result.timeRange_ = timeRange_;
15501         } else {
15502           result.timeRange_ = timeRangeBuilder_.build();
15503         }
15504         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
15505           to_bitField0_ |= 0x00000010;
15506         }
15507         result.maxVersions_ = maxVersions_;
15508         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
15509           to_bitField0_ |= 0x00000020;
15510         }
15511         result.cacheBlocks_ = cacheBlocks_;
15512         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
15513           to_bitField0_ |= 0x00000040;
15514         }
15515         result.batchSize_ = batchSize_;
15516         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
15517           to_bitField0_ |= 0x00000080;
15518         }
15519         result.maxResultSize_ = maxResultSize_;
15520         if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
15521           to_bitField0_ |= 0x00000100;
15522         }
15523         result.storeLimit_ = storeLimit_;
15524         if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
15525           to_bitField0_ |= 0x00000200;
15526         }
15527         result.storeOffset_ = storeOffset_;
15528         if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
15529           to_bitField0_ |= 0x00000400;
15530         }
15531         result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_;
15532         if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
15533           to_bitField0_ |= 0x00000800;
15534         }
15535         result.small_ = small_;
15536         if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
15537           to_bitField0_ |= 0x00001000;
15538         }
15539         result.reversed_ = reversed_;
15540         if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
15541           to_bitField0_ |= 0x00002000;
15542         }
15543         result.consistency_ = consistency_;
15544         if (((from_bitField0_ & 0x00010000) == 0x00010000)) {
15545           to_bitField0_ |= 0x00004000;
15546         }
15547         result.caching_ = caching_;
15548         if (((from_bitField0_ & 0x00020000) == 0x00020000)) {
15549           to_bitField0_ |= 0x00008000;
15550         }
15551         result.allowPartialResults_ = allowPartialResults_;
15552         if (cfTimeRangeBuilder_ == null) {
15553           if (((bitField0_ & 0x00040000) == 0x00040000)) {
15554             cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
15555             bitField0_ = (bitField0_ & ~0x00040000);
15556           }
15557           result.cfTimeRange_ = cfTimeRange_;
15558         } else {
15559           result.cfTimeRange_ = cfTimeRangeBuilder_.build();
15560         }
15561         result.bitField0_ = to_bitField0_;
15562         onBuilt();
15563         return result;
15564       }
15565 
15566       public Builder mergeFrom(com.google.protobuf.Message other) {
15567         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) {
15568           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other);
15569         } else {
15570           super.mergeFrom(other);
15571           return this;
15572         }
15573       }
15574 
15575       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) {
15576         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this;
15577         if (columnBuilder_ == null) {
15578           if (!other.column_.isEmpty()) {
15579             if (column_.isEmpty()) {
15580               column_ = other.column_;
15581               bitField0_ = (bitField0_ & ~0x00000001);
15582             } else {
15583               ensureColumnIsMutable();
15584               column_.addAll(other.column_);
15585             }
15586             onChanged();
15587           }
15588         } else {
15589           if (!other.column_.isEmpty()) {
15590             if (columnBuilder_.isEmpty()) {
15591               columnBuilder_.dispose();
15592               columnBuilder_ = null;
15593               column_ = other.column_;
15594               bitField0_ = (bitField0_ & ~0x00000001);
15595               columnBuilder_ =
15596                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
15597                    getColumnFieldBuilder() : null;
15598             } else {
15599               columnBuilder_.addAllMessages(other.column_);
15600             }
15601           }
15602         }
15603         if (attributeBuilder_ == null) {
15604           if (!other.attribute_.isEmpty()) {
15605             if (attribute_.isEmpty()) {
15606               attribute_ = other.attribute_;
15607               bitField0_ = (bitField0_ & ~0x00000002);
15608             } else {
15609               ensureAttributeIsMutable();
15610               attribute_.addAll(other.attribute_);
15611             }
15612             onChanged();
15613           }
15614         } else {
15615           if (!other.attribute_.isEmpty()) {
15616             if (attributeBuilder_.isEmpty()) {
15617               attributeBuilder_.dispose();
15618               attributeBuilder_ = null;
15619               attribute_ = other.attribute_;
15620               bitField0_ = (bitField0_ & ~0x00000002);
15621               attributeBuilder_ =
15622                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
15623                    getAttributeFieldBuilder() : null;
15624             } else {
15625               attributeBuilder_.addAllMessages(other.attribute_);
15626             }
15627           }
15628         }
15629         if (other.hasStartRow()) {
15630           setStartRow(other.getStartRow());
15631         }
15632         if (other.hasStopRow()) {
15633           setStopRow(other.getStopRow());
15634         }
15635         if (other.hasFilter()) {
15636           mergeFilter(other.getFilter());
15637         }
15638         if (other.hasTimeRange()) {
15639           mergeTimeRange(other.getTimeRange());
15640         }
15641         if (other.hasMaxVersions()) {
15642           setMaxVersions(other.getMaxVersions());
15643         }
15644         if (other.hasCacheBlocks()) {
15645           setCacheBlocks(other.getCacheBlocks());
15646         }
15647         if (other.hasBatchSize()) {
15648           setBatchSize(other.getBatchSize());
15649         }
15650         if (other.hasMaxResultSize()) {
15651           setMaxResultSize(other.getMaxResultSize());
15652         }
15653         if (other.hasStoreLimit()) {
15654           setStoreLimit(other.getStoreLimit());
15655         }
15656         if (other.hasStoreOffset()) {
15657           setStoreOffset(other.getStoreOffset());
15658         }
15659         if (other.hasLoadColumnFamiliesOnDemand()) {
15660           setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand());
15661         }
15662         if (other.hasSmall()) {
15663           setSmall(other.getSmall());
15664         }
15665         if (other.hasReversed()) {
15666           setReversed(other.getReversed());
15667         }
15668         if (other.hasConsistency()) {
15669           setConsistency(other.getConsistency());
15670         }
15671         if (other.hasCaching()) {
15672           setCaching(other.getCaching());
15673         }
15674         if (other.hasAllowPartialResults()) {
15675           setAllowPartialResults(other.getAllowPartialResults());
15676         }
15677         if (cfTimeRangeBuilder_ == null) {
15678           if (!other.cfTimeRange_.isEmpty()) {
15679             if (cfTimeRange_.isEmpty()) {
15680               cfTimeRange_ = other.cfTimeRange_;
15681               bitField0_ = (bitField0_ & ~0x00040000);
15682             } else {
15683               ensureCfTimeRangeIsMutable();
15684               cfTimeRange_.addAll(other.cfTimeRange_);
15685             }
15686             onChanged();
15687           }
15688         } else {
15689           if (!other.cfTimeRange_.isEmpty()) {
15690             if (cfTimeRangeBuilder_.isEmpty()) {
15691               cfTimeRangeBuilder_.dispose();
15692               cfTimeRangeBuilder_ = null;
15693               cfTimeRange_ = other.cfTimeRange_;
15694               bitField0_ = (bitField0_ & ~0x00040000);
15695               cfTimeRangeBuilder_ =
15696                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
15697                    getCfTimeRangeFieldBuilder() : null;
15698             } else {
15699               cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_);
15700             }
15701           }
15702         }
15703         this.mergeUnknownFields(other.getUnknownFields());
15704         return this;
15705       }
15706 
15707       public final boolean isInitialized() {
15708         for (int i = 0; i < getColumnCount(); i++) {
15709           if (!getColumn(i).isInitialized()) {
15710 
15711             return false;
15712           }
15713         }
15714         for (int i = 0; i < getAttributeCount(); i++) {
15715           if (!getAttribute(i).isInitialized()) {
15716 
15717             return false;
15718           }
15719         }
15720         if (hasFilter()) {
15721           if (!getFilter().isInitialized()) {
15722 
15723             return false;
15724           }
15725         }
15726         for (int i = 0; i < getCfTimeRangeCount(); i++) {
15727           if (!getCfTimeRange(i).isInitialized()) {
15728 
15729             return false;
15730           }
15731         }
15732         return true;
15733       }
15734 
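      // The stream-based mergeFrom() below parses with PARSER and, if parsing
      // fails partway, still merges whatever was read (the unfinished message
      // carried by the InvalidProtocolBufferException) before rethrowing.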
15735       public Builder mergeFrom(
15736           com.google.protobuf.CodedInputStream input,
15737           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
15738           throws java.io.IOException {
15739         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parsedMessage = null;
15740         try {
15741           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
15742         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
15743           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage();
15744           throw e;
15745         } finally {
15746           if (parsedMessage != null) {
15747             mergeFrom(parsedMessage);
15748           }
15749         }
15750         return this;
15751       }
15752       private int bitField0_;
15753 
15754       // repeated .Column column = 1;
15755       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ =
15756         java.util.Collections.emptyList();
15757       private void ensureColumnIsMutable() {
15758         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
15759           column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_);
15760           bitField0_ |= 0x00000001;
15761          }
15762       }
15763 
15764       private com.google.protobuf.RepeatedFieldBuilder<
15765           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_;
15766 
15767       /**
15768        * <code>repeated .Column column = 1;</code>
15769        */
15770       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
15771         if (columnBuilder_ == null) {
15772           return java.util.Collections.unmodifiableList(column_);
15773         } else {
15774           return columnBuilder_.getMessageList();
15775         }
15776       }
15777       /**
15778        * <code>repeated .Column column = 1;</code>
15779        */
15780       public int getColumnCount() {
15781         if (columnBuilder_ == null) {
15782           return column_.size();
15783         } else {
15784           return columnBuilder_.getCount();
15785         }
15786       }
15787       /**
15788        * <code>repeated .Column column = 1;</code>
15789        */
15790       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
15791         if (columnBuilder_ == null) {
15792           return column_.get(index);
15793         } else {
15794           return columnBuilder_.getMessage(index);
15795         }
15796       }
15797       /**
15798        * <code>repeated .Column column = 1;</code>
15799        */
15800       public Builder setColumn(
15801           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
15802         if (columnBuilder_ == null) {
15803           if (value == null) {
15804             throw new NullPointerException();
15805           }
15806           ensureColumnIsMutable();
15807           column_.set(index, value);
15808           onChanged();
15809         } else {
15810           columnBuilder_.setMessage(index, value);
15811         }
15812         return this;
15813       }
15814       /**
15815        * <code>repeated .Column column = 1;</code>
15816        */
15817       public Builder setColumn(
15818           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
15819         if (columnBuilder_ == null) {
15820           ensureColumnIsMutable();
15821           column_.set(index, builderForValue.build());
15822           onChanged();
15823         } else {
15824           columnBuilder_.setMessage(index, builderForValue.build());
15825         }
15826         return this;
15827       }
15828       /**
15829        * <code>repeated .Column column = 1;</code>
15830        */
15831       public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
15832         if (columnBuilder_ == null) {
15833           if (value == null) {
15834             throw new NullPointerException();
15835           }
15836           ensureColumnIsMutable();
15837           column_.add(value);
15838           onChanged();
15839         } else {
15840           columnBuilder_.addMessage(value);
15841         }
15842         return this;
15843       }
15844       /**
15845        * <code>repeated .Column column = 1;</code>
15846        */
15847       public Builder addColumn(
15848           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) {
15849         if (columnBuilder_ == null) {
15850           if (value == null) {
15851             throw new NullPointerException();
15852           }
15853           ensureColumnIsMutable();
15854           column_.add(index, value);
15855           onChanged();
15856         } else {
15857           columnBuilder_.addMessage(index, value);
15858         }
15859         return this;
15860       }
15861       /**
15862        * <code>repeated .Column column = 1;</code>
15863        */
15864       public Builder addColumn(
15865           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
15866         if (columnBuilder_ == null) {
15867           ensureColumnIsMutable();
15868           column_.add(builderForValue.build());
15869           onChanged();
15870         } else {
15871           columnBuilder_.addMessage(builderForValue.build());
15872         }
15873         return this;
15874       }
15875       /**
15876        * <code>repeated .Column column = 1;</code>
15877        */
15878       public Builder addColumn(
15879           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) {
15880         if (columnBuilder_ == null) {
15881           ensureColumnIsMutable();
15882           column_.add(index, builderForValue.build());
15883           onChanged();
15884         } else {
15885           columnBuilder_.addMessage(index, builderForValue.build());
15886         }
15887         return this;
15888       }
15889       /**
15890        * <code>repeated .Column column = 1;</code>
15891        */
15892       public Builder addAllColumn(
15893           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) {
15894         if (columnBuilder_ == null) {
15895           ensureColumnIsMutable();
15896           super.addAll(values, column_);
15897           onChanged();
15898         } else {
15899           columnBuilder_.addAllMessages(values);
15900         }
15901         return this;
15902       }
15903       /**
15904        * <code>repeated .Column column = 1;</code>
15905        */
15906       public Builder clearColumn() {
15907         if (columnBuilder_ == null) {
15908           column_ = java.util.Collections.emptyList();
15909           bitField0_ = (bitField0_ & ~0x00000001);
15910           onChanged();
15911         } else {
15912           columnBuilder_.clear();
15913         }
15914         return this;
15915       }
15916       /**
15917        * <code>repeated .Column column = 1;</code>
15918        */
15919       public Builder removeColumn(int index) {
15920         if (columnBuilder_ == null) {
15921           ensureColumnIsMutable();
15922           column_.remove(index);
15923           onChanged();
15924         } else {
15925           columnBuilder_.remove(index);
15926         }
15927         return this;
15928       }
15929       /**
15930        * <code>repeated .Column column = 1;</code>
15931        */
15932       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder(
15933           int index) {
15934         return getColumnFieldBuilder().getBuilder(index);
15935       }
15936       /**
15937        * <code>repeated .Column column = 1;</code>
15938        */
15939       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
15940           int index) {
15941         if (columnBuilder_ == null) {
15942           return column_.get(index);  } else {
15943           return columnBuilder_.getMessageOrBuilder(index);
15944         }
15945       }
15946       /**
15947        * <code>repeated .Column column = 1;</code>
15948        */
15949       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
15950            getColumnOrBuilderList() {
15951         if (columnBuilder_ != null) {
15952           return columnBuilder_.getMessageOrBuilderList();
15953         } else {
15954           return java.util.Collections.unmodifiableList(column_);
15955         }
15956       }
15957       /**
15958        * <code>repeated .Column column = 1;</code>
15959        */
15960       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() {
15961         return getColumnFieldBuilder().addBuilder(
15962             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
15963       }
15964       /**
15965        * <code>repeated .Column column = 1;</code>
15966        */
15967       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder(
15968           int index) {
15969         return getColumnFieldBuilder().addBuilder(
15970             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance());
15971       }
15972       /**
15973        * <code>repeated .Column column = 1;</code>
15974        */
15975       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder>
15976            getColumnBuilderList() {
15977         return getColumnFieldBuilder().getBuilderList();
15978       }
15979       private com.google.protobuf.RepeatedFieldBuilder<
15980           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
15981           getColumnFieldBuilder() {
15982         if (columnBuilder_ == null) {
15983           columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
15984               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>(
15985                   column_,
15986                   ((bitField0_ & 0x00000001) == 0x00000001),
15987                   getParentForChildren(),
15988                   isClean());
15989           column_ = null;
15990         }
15991         return columnBuilder_;
15992       }
15993 
15994       // repeated .NameBytesPair attribute = 2;
15995       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ =
15996         java.util.Collections.emptyList();
15997       private void ensureAttributeIsMutable() {
15998         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
15999           attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_);
16000           bitField0_ |= 0x00000002;
16001          }
16002       }
16003 
16004       private com.google.protobuf.RepeatedFieldBuilder<
16005           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_;
16006 
16007       /**
16008        * <code>repeated .NameBytesPair attribute = 2;</code>
16009        */
16010       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
16011         if (attributeBuilder_ == null) {
16012           return java.util.Collections.unmodifiableList(attribute_);
16013         } else {
16014           return attributeBuilder_.getMessageList();
16015         }
16016       }
16017       /**
16018        * <code>repeated .NameBytesPair attribute = 2;</code>
16019        */
16020       public int getAttributeCount() {
16021         if (attributeBuilder_ == null) {
16022           return attribute_.size();
16023         } else {
16024           return attributeBuilder_.getCount();
16025         }
16026       }
16027       /**
16028        * <code>repeated .NameBytesPair attribute = 2;</code>
16029        */
16030       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
16031         if (attributeBuilder_ == null) {
16032           return attribute_.get(index);
16033         } else {
16034           return attributeBuilder_.getMessage(index);
16035         }
16036       }
16037       /**
16038        * <code>repeated .NameBytesPair attribute = 2;</code>
16039        */
16040       public Builder setAttribute(
16041           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
16042         if (attributeBuilder_ == null) {
16043           if (value == null) {
16044             throw new NullPointerException();
16045           }
16046           ensureAttributeIsMutable();
16047           attribute_.set(index, value);
16048           onChanged();
16049         } else {
16050           attributeBuilder_.setMessage(index, value);
16051         }
16052         return this;
16053       }
16054       /**
16055        * <code>repeated .NameBytesPair attribute = 2;</code>
16056        */
16057       public Builder setAttribute(
16058           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
16059         if (attributeBuilder_ == null) {
16060           ensureAttributeIsMutable();
16061           attribute_.set(index, builderForValue.build());
16062           onChanged();
16063         } else {
16064           attributeBuilder_.setMessage(index, builderForValue.build());
16065         }
16066         return this;
16067       }
16068       /**
16069        * <code>repeated .NameBytesPair attribute = 2;</code>
16070        */
16071       public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
16072         if (attributeBuilder_ == null) {
16073           if (value == null) {
16074             throw new NullPointerException();
16075           }
16076           ensureAttributeIsMutable();
16077           attribute_.add(value);
16078           onChanged();
16079         } else {
16080           attributeBuilder_.addMessage(value);
16081         }
16082         return this;
16083       }
16084       /**
16085        * <code>repeated .NameBytesPair attribute = 2;</code>
16086        */
16087       public Builder addAttribute(
16088           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
16089         if (attributeBuilder_ == null) {
16090           if (value == null) {
16091             throw new NullPointerException();
16092           }
16093           ensureAttributeIsMutable();
16094           attribute_.add(index, value);
16095           onChanged();
16096         } else {
16097           attributeBuilder_.addMessage(index, value);
16098         }
16099         return this;
16100       }
16101       /**
16102        * <code>repeated .NameBytesPair attribute = 2;</code>
16103        */
16104       public Builder addAttribute(
16105           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
16106         if (attributeBuilder_ == null) {
16107           ensureAttributeIsMutable();
16108           attribute_.add(builderForValue.build());
16109           onChanged();
16110         } else {
16111           attributeBuilder_.addMessage(builderForValue.build());
16112         }
16113         return this;
16114       }
16115       /**
16116        * <code>repeated .NameBytesPair attribute = 2;</code>
16117        */
16118       public Builder addAttribute(
16119           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
16120         if (attributeBuilder_ == null) {
16121           ensureAttributeIsMutable();
16122           attribute_.add(index, builderForValue.build());
16123           onChanged();
16124         } else {
16125           attributeBuilder_.addMessage(index, builderForValue.build());
16126         }
16127         return this;
16128       }
16129       /**
16130        * <code>repeated .NameBytesPair attribute = 2;</code>
16131        */
16132       public Builder addAllAttribute(
16133           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
16134         if (attributeBuilder_ == null) {
16135           ensureAttributeIsMutable();
16136           super.addAll(values, attribute_);
16137           onChanged();
16138         } else {
16139           attributeBuilder_.addAllMessages(values);
16140         }
16141         return this;
16142       }
16143       /**
16144        * <code>repeated .NameBytesPair attribute = 2;</code>
16145        */
16146       public Builder clearAttribute() {
16147         if (attributeBuilder_ == null) {
16148           attribute_ = java.util.Collections.emptyList();
16149           bitField0_ = (bitField0_ & ~0x00000002);
16150           onChanged();
16151         } else {
16152           attributeBuilder_.clear();
16153         }
16154         return this;
16155       }
16156       /**
16157        * <code>repeated .NameBytesPair attribute = 2;</code>
16158        */
16159       public Builder removeAttribute(int index) {
16160         if (attributeBuilder_ == null) {
16161           ensureAttributeIsMutable();
16162           attribute_.remove(index);
16163           onChanged();
16164         } else {
16165           attributeBuilder_.remove(index);
16166         }
16167         return this;
16168       }
16169       /**
16170        * <code>repeated .NameBytesPair attribute = 2;</code>
16171        */
16172       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder(
16173           int index) {
16174         return getAttributeFieldBuilder().getBuilder(index);
16175       }
16176       /**
16177        * <code>repeated .NameBytesPair attribute = 2;</code>
16178        */
16179       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
16180           int index) {
16181         if (attributeBuilder_ == null) {
16182           return attribute_.get(index);  } else {
16183           return attributeBuilder_.getMessageOrBuilder(index);
16184         }
16185       }
16186       /**
16187        * <code>repeated .NameBytesPair attribute = 2;</code>
16188        */
16189       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
16190            getAttributeOrBuilderList() {
16191         if (attributeBuilder_ != null) {
16192           return attributeBuilder_.getMessageOrBuilderList();
16193         } else {
16194           return java.util.Collections.unmodifiableList(attribute_);
16195         }
16196       }
16197       /**
16198        * <code>repeated .NameBytesPair attribute = 2;</code>
16199        */
16200       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() {
16201         return getAttributeFieldBuilder().addBuilder(
16202             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
16203       }
16204       /**
16205        * <code>repeated .NameBytesPair attribute = 2;</code>
16206        */
16207       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder(
16208           int index) {
16209         return getAttributeFieldBuilder().addBuilder(
16210             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
16211       }
16212       /**
16213        * <code>repeated .NameBytesPair attribute = 2;</code>
16214        */
16215       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
16216            getAttributeBuilderList() {
16217         return getAttributeFieldBuilder().getBuilderList();
16218       }
16219       private com.google.protobuf.RepeatedFieldBuilder<
16220           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
16221           getAttributeFieldBuilder() {
16222         if (attributeBuilder_ == null) {
16223           attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
16224               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
16225                   attribute_,
16226                   ((bitField0_ & 0x00000002) == 0x00000002),
16227                   getParentForChildren(),
16228                   isClean());
16229           attribute_ = null;
16230         }
16231         return attributeBuilder_;
16232       }
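      /*
       * Illustrative sketch (editorial, not generated code): attaching an attribute to a
       * Scan under construction via the repeated attribute field above. It assumes the
       * standard generated HBaseProtos.NameBytesPair builder with a string name and a
       * bytes value; the attribute name and the scanBuilder variable are hypothetical.
       *
       *   org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair attr =
       *       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder()
       *           .setName("my.scan.attribute")                                    // hypothetical name
       *           .setValue(com.google.protobuf.ByteString.copyFrom(new byte[] { 1 }))
       *           .build();
       *   scanBuilder.addAttribute(attr);   // scanBuilder is a Scan.Builder as defined in this class
       */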
16233 
16234       // optional bytes start_row = 3;
16235       private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY;
16236       /**
16237        * <code>optional bytes start_row = 3;</code>
16238        */
16239       public boolean hasStartRow() {
16240         return ((bitField0_ & 0x00000004) == 0x00000004);
16241       }
16242       /**
16243        * <code>optional bytes start_row = 3;</code>
16244        */
16245       public com.google.protobuf.ByteString getStartRow() {
16246         return startRow_;
16247       }
16248       /**
16249        * <code>optional bytes start_row = 3;</code>
16250        */
16251       public Builder setStartRow(com.google.protobuf.ByteString value) {
16252         if (value == null) {
16253           throw new NullPointerException();
16254         }
16255         bitField0_ |= 0x00000004;
16256         startRow_ = value;
16257         onChanged();
16258         return this;
16259       }
16260       /**
16261        * <code>optional bytes start_row = 3;</code>
16262        */
16263       public Builder clearStartRow() {
16264         bitField0_ = (bitField0_ & ~0x00000004);
16265         startRow_ = getDefaultInstance().getStartRow();
16266         onChanged();
16267         return this;
16268       }
16269 
16270       // optional bytes stop_row = 4;
16271       private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY;
16272       /**
16273        * <code>optional bytes stop_row = 4;</code>
16274        */
16275       public boolean hasStopRow() {
16276         return ((bitField0_ & 0x00000008) == 0x00000008);
16277       }
16278       /**
16279        * <code>optional bytes stop_row = 4;</code>
16280        */
16281       public com.google.protobuf.ByteString getStopRow() {
16282         return stopRow_;
16283       }
16284       /**
16285        * <code>optional bytes stop_row = 4;</code>
16286        */
16287       public Builder setStopRow(com.google.protobuf.ByteString value) {
16288         if (value == null) {
16289           throw new NullPointerException();
16290         }
16291         bitField0_ |= 0x00000008;
16292         stopRow_ = value;
16293         onChanged();
16294         return this;
16295       }
16296       /**
16297        * <code>optional bytes stop_row = 4;</code>
16298        */
16299       public Builder clearStopRow() {
16300         bitField0_ = (bitField0_ & ~0x00000008);
16301         stopRow_ = getDefaultInstance().getStopRow();
16302         onChanged();
16303         return this;
16304       }
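      /*
       * Illustrative sketch (editorial, not generated code): populating the start_row and
       * stop_row byte fields above. ByteString.copyFromUtf8 is the stock protobuf helper;
       * the row keys are hypothetical, and by HBase scan convention the start row is
       * inclusive while the stop row is exclusive.
       *
       *   scanBuilder
       *       .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-0000"))
       *       .setStopRow(com.google.protobuf.ByteString.copyFromUtf8("row-0100"));
       */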
16305 
16306       // optional .Filter filter = 5;
16307       private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
16308       private com.google.protobuf.SingleFieldBuilder<
16309           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_;
16310       /**
16311        * <code>optional .Filter filter = 5;</code>
16312        */
16313       public boolean hasFilter() {
16314         return ((bitField0_ & 0x00000010) == 0x00000010);
16315       }
16316       /**
16317        * <code>optional .Filter filter = 5;</code>
16318        */
16319       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
16320         if (filterBuilder_ == null) {
16321           return filter_;
16322         } else {
16323           return filterBuilder_.getMessage();
16324         }
16325       }
16326       /**
16327        * <code>optional .Filter filter = 5;</code>
16328        */
16329       public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
16330         if (filterBuilder_ == null) {
16331           if (value == null) {
16332             throw new NullPointerException();
16333           }
16334           filter_ = value;
16335           onChanged();
16336         } else {
16337           filterBuilder_.setMessage(value);
16338         }
16339         bitField0_ |= 0x00000010;
16340         return this;
16341       }
16342       /**
16343        * <code>optional .Filter filter = 5;</code>
16344        */
16345       public Builder setFilter(
16346           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) {
16347         if (filterBuilder_ == null) {
16348           filter_ = builderForValue.build();
16349           onChanged();
16350         } else {
16351           filterBuilder_.setMessage(builderForValue.build());
16352         }
16353         bitField0_ |= 0x00000010;
16354         return this;
16355       }
16356       /**
16357        * <code>optional .Filter filter = 5;</code>
16358        */
16359       public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) {
16360         if (filterBuilder_ == null) {
16361           if (((bitField0_ & 0x00000010) == 0x00000010) &&
16362               filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) {
16363             filter_ =
16364               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial();
16365           } else {
16366             filter_ = value;
16367           }
16368           onChanged();
16369         } else {
16370           filterBuilder_.mergeFrom(value);
16371         }
16372         bitField0_ |= 0x00000010;
16373         return this;
16374       }
16375       /**
16376        * <code>optional .Filter filter = 5;</code>
16377        */
16378       public Builder clearFilter() {
16379         if (filterBuilder_ == null) {
16380           filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
16381           onChanged();
16382         } else {
16383           filterBuilder_.clear();
16384         }
16385         bitField0_ = (bitField0_ & ~0x00000010);
16386         return this;
16387       }
16388       /**
16389        * <code>optional .Filter filter = 5;</code>
16390        */
16391       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() {
16392         bitField0_ |= 0x00000010;
16393         onChanged();
16394         return getFilterFieldBuilder().getBuilder();
16395       }
16396       /**
16397        * <code>optional .Filter filter = 5;</code>
16398        */
16399       public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
16400         if (filterBuilder_ != null) {
16401           return filterBuilder_.getMessageOrBuilder();
16402         } else {
16403           return filter_;
16404         }
16405       }
16406       /**
16407        * <code>optional .Filter filter = 5;</code>
16408        */
16409       private com.google.protobuf.SingleFieldBuilder<
16410           org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>
16411           getFilterFieldBuilder() {
16412         if (filterBuilder_ == null) {
16413           filterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
16414               org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>(
16415                   filter_,
16416                   getParentForChildren(),
16417                   isClean());
16418           filter_ = null;
16419         }
16420         return filterBuilder_;
16421       }
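      /*
       * Illustrative sketch (editorial, not generated code): the difference between the
       * setFilter and mergeFilter methods above. setFilter replaces the whole message,
       * while mergeFilter folds the supplied fields into any filter already present
       * (newBuilder(existing).mergeFrom(value)). The Filter fields used here assume the
       * FilterProtos layout of a filter class name plus serialized filter bytes, and
       * serializedFilterBytes is a hypothetical ByteString.
       *
       *   org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter f =
       *       org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder()
       *           .setName("org.apache.hadoop.hbase.filter.PrefixFilter")   // hypothetical choice
       *           .setSerializedFilter(serializedFilterBytes)
       *           .build();
       *   scanBuilder.setFilter(f);      // replaces any previous filter
       *   scanBuilder.mergeFilter(f);    // merges into the current filter, field by field
       */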
16422 
16423       // optional .TimeRange time_range = 6;
16424       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
16425       private com.google.protobuf.SingleFieldBuilder<
16426           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_;
16427       /**
16428        * <code>optional .TimeRange time_range = 6;</code>
16429        */
16430       public boolean hasTimeRange() {
16431         return ((bitField0_ & 0x00000020) == 0x00000020);
16432       }
16433       /**
16434        * <code>optional .TimeRange time_range = 6;</code>
16435        */
16436       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
16437         if (timeRangeBuilder_ == null) {
16438           return timeRange_;
16439         } else {
16440           return timeRangeBuilder_.getMessage();
16441         }
16442       }
16443       /**
16444        * <code>optional .TimeRange time_range = 6;</code>
16445        */
16446       public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
16447         if (timeRangeBuilder_ == null) {
16448           if (value == null) {
16449             throw new NullPointerException();
16450           }
16451           timeRange_ = value;
16452           onChanged();
16453         } else {
16454           timeRangeBuilder_.setMessage(value);
16455         }
16456         bitField0_ |= 0x00000020;
16457         return this;
16458       }
16459       /**
16460        * <code>optional .TimeRange time_range = 6;</code>
16461        */
16462       public Builder setTimeRange(
16463           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) {
16464         if (timeRangeBuilder_ == null) {
16465           timeRange_ = builderForValue.build();
16466           onChanged();
16467         } else {
16468           timeRangeBuilder_.setMessage(builderForValue.build());
16469         }
16470         bitField0_ |= 0x00000020;
16471         return this;
16472       }
16473       /**
16474        * <code>optional .TimeRange time_range = 6;</code>
16475        */
16476       public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) {
16477         if (timeRangeBuilder_ == null) {
16478           if (((bitField0_ & 0x00000020) == 0x00000020) &&
16479               timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) {
16480             timeRange_ =
16481               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial();
16482           } else {
16483             timeRange_ = value;
16484           }
16485           onChanged();
16486         } else {
16487           timeRangeBuilder_.mergeFrom(value);
16488         }
16489         bitField0_ |= 0x00000020;
16490         return this;
16491       }
16492       /**
16493        * <code>optional .TimeRange time_range = 6;</code>
16494        */
16495       public Builder clearTimeRange() {
16496         if (timeRangeBuilder_ == null) {
16497           timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
16498           onChanged();
16499         } else {
16500           timeRangeBuilder_.clear();
16501         }
16502         bitField0_ = (bitField0_ & ~0x00000020);
16503         return this;
16504       }
16505       /**
16506        * <code>optional .TimeRange time_range = 6;</code>
16507        */
16508       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() {
16509         bitField0_ |= 0x00000020;
16510         onChanged();
16511         return getTimeRangeFieldBuilder().getBuilder();
16512       }
16513       /**
16514        * <code>optional .TimeRange time_range = 6;</code>
16515        */
16516       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
16517         if (timeRangeBuilder_ != null) {
16518           return timeRangeBuilder_.getMessageOrBuilder();
16519         } else {
16520           return timeRange_;
16521         }
16522       }
16523       /**
16524        * <code>optional .TimeRange time_range = 6;</code>
16525        */
16526       private com.google.protobuf.SingleFieldBuilder<
16527           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>
16528           getTimeRangeFieldBuilder() {
16529         if (timeRangeBuilder_ == null) {
16530           timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder<
16531               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>(
16532                   timeRange_,
16533                   getParentForChildren(),
16534                   isClean());
16535           timeRange_ = null;
16536         }
16537         return timeRangeBuilder_;
16538       }
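      /*
       * Illustrative sketch (editorial, not generated code): restricting the scan to a
       * time window via the time_range field above. It assumes HBaseProtos.TimeRange
       * exposes setFrom/setTo millisecond bounds; the timestamps are hypothetical.
       *
       *   scanBuilder.setTimeRange(
       *       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder()
       *           .setFrom(1500000000000L)   // lower bound, inclusive by HBase convention
       *           .setTo(1500003600000L)     // upper bound, exclusive by HBase convention
       *           .build());
       */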
16539 
16540       // optional uint32 max_versions = 7 [default = 1];
16541       private int maxVersions_ = 1;
16542       /**
16543        * <code>optional uint32 max_versions = 7 [default = 1];</code>
16544        */
16545       public boolean hasMaxVersions() {
16546         return ((bitField0_ & 0x00000040) == 0x00000040);
16547       }
16548       /**
16549        * <code>optional uint32 max_versions = 7 [default = 1];</code>
16550        */
16551       public int getMaxVersions() {
16552         return maxVersions_;
16553       }
16554       /**
16555        * <code>optional uint32 max_versions = 7 [default = 1];</code>
16556        */
16557       public Builder setMaxVersions(int value) {
16558         bitField0_ |= 0x00000040;
16559         maxVersions_ = value;
16560         onChanged();
16561         return this;
16562       }
16563       /**
16564        * <code>optional uint32 max_versions = 7 [default = 1];</code>
16565        */
16566       public Builder clearMaxVersions() {
16567         bitField0_ = (bitField0_ & ~0x00000040);
16568         maxVersions_ = 1;
16569         onChanged();
16570         return this;
16571       }
16572 
16573       // optional bool cache_blocks = 8 [default = true];
16574       private boolean cacheBlocks_ = true;
16575       /**
16576        * <code>optional bool cache_blocks = 8 [default = true];</code>
16577        */
16578       public boolean hasCacheBlocks() {
16579         return ((bitField0_ & 0x00000080) == 0x00000080);
16580       }
16581       /**
16582        * <code>optional bool cache_blocks = 8 [default = true];</code>
16583        */
16584       public boolean getCacheBlocks() {
16585         return cacheBlocks_;
16586       }
16587       /**
16588        * <code>optional bool cache_blocks = 8 [default = true];</code>
16589        */
16590       public Builder setCacheBlocks(boolean value) {
16591         bitField0_ |= 0x00000080;
16592         cacheBlocks_ = value;
16593         onChanged();
16594         return this;
16595       }
16596       /**
16597        * <code>optional bool cache_blocks = 8 [default = true];</code>
16598        */
16599       public Builder clearCacheBlocks() {
16600         bitField0_ = (bitField0_ & ~0x00000080);
16601         cacheBlocks_ = true;
16602         onChanged();
16603         return this;
16604       }
16605 
16606       // optional uint32 batch_size = 9;
16607       private int batchSize_ ;
16608       /**
16609        * <code>optional uint32 batch_size = 9;</code>
16610        */
16611       public boolean hasBatchSize() {
16612         return ((bitField0_ & 0x00000100) == 0x00000100);
16613       }
16614       /**
16615        * <code>optional uint32 batch_size = 9;</code>
16616        */
16617       public int getBatchSize() {
16618         return batchSize_;
16619       }
16620       /**
16621        * <code>optional uint32 batch_size = 9;</code>
16622        */
16623       public Builder setBatchSize(int value) {
16624         bitField0_ |= 0x00000100;
16625         batchSize_ = value;
16626         onChanged();
16627         return this;
16628       }
16629       /**
16630        * <code>optional uint32 batch_size = 9;</code>
16631        */
16632       public Builder clearBatchSize() {
16633         bitField0_ = (bitField0_ & ~0x00000100);
16634         batchSize_ = 0;
16635         onChanged();
16636         return this;
16637       }
16638 
16639       // optional uint64 max_result_size = 10;
16640       private long maxResultSize_ ;
16641       /**
16642        * <code>optional uint64 max_result_size = 10;</code>
16643        */
16644       public boolean hasMaxResultSize() {
16645         return ((bitField0_ & 0x00000200) == 0x00000200);
16646       }
16647       /**
16648        * <code>optional uint64 max_result_size = 10;</code>
16649        */
16650       public long getMaxResultSize() {
16651         return maxResultSize_;
16652       }
16653       /**
16654        * <code>optional uint64 max_result_size = 10;</code>
16655        */
16656       public Builder setMaxResultSize(long value) {
16657         bitField0_ |= 0x00000200;
16658         maxResultSize_ = value;
16659         onChanged();
16660         return this;
16661       }
16662       /**
16663        * <code>optional uint64 max_result_size = 10;</code>
16664        */
16665       public Builder clearMaxResultSize() {
16666         bitField0_ = (bitField0_ & ~0x00000200);
16667         maxResultSize_ = 0L;
16668         onChanged();
16669         return this;
16670       }
16671 
16672       // optional uint32 store_limit = 11;
16673       private int storeLimit_ ;
16674       /**
16675        * <code>optional uint32 store_limit = 11;</code>
16676        */
16677       public boolean hasStoreLimit() {
16678         return ((bitField0_ & 0x00000400) == 0x00000400);
16679       }
16680       /**
16681        * <code>optional uint32 store_limit = 11;</code>
16682        */
16683       public int getStoreLimit() {
16684         return storeLimit_;
16685       }
16686       /**
16687        * <code>optional uint32 store_limit = 11;</code>
16688        */
16689       public Builder setStoreLimit(int value) {
16690         bitField0_ |= 0x00000400;
16691         storeLimit_ = value;
16692         onChanged();
16693         return this;
16694       }
16695       /**
16696        * <code>optional uint32 store_limit = 11;</code>
16697        */
16698       public Builder clearStoreLimit() {
16699         bitField0_ = (bitField0_ & ~0x00000400);
16700         storeLimit_ = 0;
16701         onChanged();
16702         return this;
16703       }
16704 
16705       // optional uint32 store_offset = 12;
16706       private int storeOffset_ ;
16707       /**
16708        * <code>optional uint32 store_offset = 12;</code>
16709        */
16710       public boolean hasStoreOffset() {
16711         return ((bitField0_ & 0x00000800) == 0x00000800);
16712       }
16713       /**
16714        * <code>optional uint32 store_offset = 12;</code>
16715        */
16716       public int getStoreOffset() {
16717         return storeOffset_;
16718       }
16719       /**
16720        * <code>optional uint32 store_offset = 12;</code>
16721        */
16722       public Builder setStoreOffset(int value) {
16723         bitField0_ |= 0x00000800;
16724         storeOffset_ = value;
16725         onChanged();
16726         return this;
16727       }
16728       /**
16729        * <code>optional uint32 store_offset = 12;</code>
16730        */
16731       public Builder clearStoreOffset() {
16732         bitField0_ = (bitField0_ & ~0x00000800);
16733         storeOffset_ = 0;
16734         onChanged();
16735         return this;
16736       }
16737 
16738       // optional bool load_column_families_on_demand = 13;
16739       private boolean loadColumnFamiliesOnDemand_ ;
16740       /**
16741        * <code>optional bool load_column_families_on_demand = 13;</code>
16742        *
16743        * <pre>
16744        * DO NOT add defaults to load_column_families_on_demand.
16745        * </pre>
16746        */
16747       public boolean hasLoadColumnFamiliesOnDemand() {
16748         return ((bitField0_ & 0x00001000) == 0x00001000);
16749       }
16750       /**
16751        * <code>optional bool load_column_families_on_demand = 13;</code>
16752        *
16753        * <pre>
16754        * DO NOT add defaults to load_column_families_on_demand.
16755        * </pre>
16756        */
16757       public boolean getLoadColumnFamiliesOnDemand() {
16758         return loadColumnFamiliesOnDemand_;
16759       }
16760       /**
16761        * <code>optional bool load_column_families_on_demand = 13;</code>
16762        *
16763        * <pre>
16764        * DO NOT add defaults to load_column_families_on_demand.
16765        * </pre>
16766        */
16767       public Builder setLoadColumnFamiliesOnDemand(boolean value) {
16768         bitField0_ |= 0x00001000;
16769         loadColumnFamiliesOnDemand_ = value;
16770         onChanged();
16771         return this;
16772       }
16773       /**
16774        * <code>optional bool load_column_families_on_demand = 13;</code>
16775        *
16776        * <pre>
16777        * DO NOT add defaults to load_column_families_on_demand.
16778        * </pre>
16779        */
16780       public Builder clearLoadColumnFamiliesOnDemand() {
16781         bitField0_ = (bitField0_ & ~0x00001000);
16782         loadColumnFamiliesOnDemand_ = false;
16783         onChanged();
16784         return this;
16785       }
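      /*
       * Illustrative sketch (editorial, not generated code): because
       * load_column_families_on_demand deliberately carries no proto default (see the
       * field comment above), callers should distinguish "field absent" from "field
       * false" by using the generated presence check before reading the value.
       *
       *   Boolean onDemand = null;                                   // null == not specified
       *   if (scanBuilder.hasLoadColumnFamiliesOnDemand()) {
       *     onDemand = scanBuilder.getLoadColumnFamiliesOnDemand();  // explicit true/false
       *   }
       */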
16786 
16787       // optional bool small = 14;
16788       private boolean small_ ;
16789       /**
16790        * <code>optional bool small = 14;</code>
16791        */
16792       public boolean hasSmall() {
16793         return ((bitField0_ & 0x00002000) == 0x00002000);
16794       }
16795       /**
16796        * <code>optional bool small = 14;</code>
16797        */
16798       public boolean getSmall() {
16799         return small_;
16800       }
16801       /**
16802        * <code>optional bool small = 14;</code>
16803        */
16804       public Builder setSmall(boolean value) {
16805         bitField0_ |= 0x00002000;
16806         small_ = value;
16807         onChanged();
16808         return this;
16809       }
16810       /**
16811        * <code>optional bool small = 14;</code>
16812        */
16813       public Builder clearSmall() {
16814         bitField0_ = (bitField0_ & ~0x00002000);
16815         small_ = false;
16816         onChanged();
16817         return this;
16818       }
16819 
16820       // optional bool reversed = 15 [default = false];
16821       private boolean reversed_ ;
16822       /**
16823        * <code>optional bool reversed = 15 [default = false];</code>
16824        */
16825       public boolean hasReversed() {
16826         return ((bitField0_ & 0x00004000) == 0x00004000);
16827       }
16828       /**
16829        * <code>optional bool reversed = 15 [default = false];</code>
16830        */
16831       public boolean getReversed() {
16832         return reversed_;
16833       }
16834       /**
16835        * <code>optional bool reversed = 15 [default = false];</code>
16836        */
16837       public Builder setReversed(boolean value) {
16838         bitField0_ |= 0x00004000;
16839         reversed_ = value;
16840         onChanged();
16841         return this;
16842       }
16843       /**
16844        * <code>optional bool reversed = 15 [default = false];</code>
16845        */
16846       public Builder clearReversed() {
16847         bitField0_ = (bitField0_ & ~0x00004000);
16848         reversed_ = false;
16849         onChanged();
16850         return this;
16851       }
16852 
16853       // optional .Consistency consistency = 16 [default = STRONG];
16854       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
16855       /**
16856        * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
16857        */
16858       public boolean hasConsistency() {
16859         return ((bitField0_ & 0x00008000) == 0x00008000);
16860       }
16861       /**
16862        * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
16863        */
16864       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() {
16865         return consistency_;
16866       }
16867       /**
16868        * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
16869        */
16870       public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) {
16871         if (value == null) {
16872           throw new NullPointerException();
16873         }
16874         bitField0_ |= 0x00008000;
16875         consistency_ = value;
16876         onChanged();
16877         return this;
16878       }
16879       /**
16880        * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
16881        */
16882       public Builder clearConsistency() {
16883         bitField0_ = (bitField0_ & ~0x00008000);
16884         consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
16885         onChanged();
16886         return this;
16887       }
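      /*
       * Illustrative sketch (editorial, not generated code): the consistency field above
       * defaults to Consistency.STRONG, so it only needs to be set explicitly when the
       * weaker TIMELINE level is acceptable for the read.
       *
       *   scanBuilder.setConsistency(
       *       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.TIMELINE);
       */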
16888 
16889       // optional uint32 caching = 17;
16890       private int caching_ ;
16891       /**
16892        * <code>optional uint32 caching = 17;</code>
16893        */
16894       public boolean hasCaching() {
16895         return ((bitField0_ & 0x00010000) == 0x00010000);
16896       }
16897       /**
16898        * <code>optional uint32 caching = 17;</code>
16899        */
16900       public int getCaching() {
16901         return caching_;
16902       }
16903       /**
16904        * <code>optional uint32 caching = 17;</code>
16905        */
16906       public Builder setCaching(int value) {
16907         bitField0_ |= 0x00010000;
16908         caching_ = value;
16909         onChanged();
16910         return this;
16911       }
16912       /**
16913        * <code>optional uint32 caching = 17;</code>
16914        */
16915       public Builder clearCaching() {
16916         bitField0_ = (bitField0_ & ~0x00010000);
16917         caching_ = 0;
16918         onChanged();
16919         return this;
16920       }
16921 
16922       // optional bool allow_partial_results = 18;
16923       private boolean allowPartialResults_ ;
16924       /**
16925        * <code>optional bool allow_partial_results = 18;</code>
16926        */
16927       public boolean hasAllowPartialResults() {
16928         return ((bitField0_ & 0x00020000) == 0x00020000);
16929       }
16930       /**
16931        * <code>optional bool allow_partial_results = 18;</code>
16932        */
16933       public boolean getAllowPartialResults() {
16934         return allowPartialResults_;
16935       }
16936       /**
16937        * <code>optional bool allow_partial_results = 18;</code>
16938        */
16939       public Builder setAllowPartialResults(boolean value) {
16940         bitField0_ |= 0x00020000;
16941         allowPartialResults_ = value;
16942         onChanged();
16943         return this;
16944       }
16945       /**
16946        * <code>optional bool allow_partial_results = 18;</code>
16947        */
16948       public Builder clearAllowPartialResults() {
16949         bitField0_ = (bitField0_ & ~0x00020000);
16950         allowPartialResults_ = false;
16951         onChanged();
16952         return this;
16953       }
16954 
16955       // repeated .ColumnFamilyTimeRange cf_time_range = 19;
16956       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ =
16957         java.util.Collections.emptyList();
16958       private void ensureCfTimeRangeIsMutable() {
16959         if (!((bitField0_ & 0x00040000) == 0x00040000)) {
16960           cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_);
16961           bitField0_ |= 0x00040000;
16962         }
16963       }
16964 
16965       private com.google.protobuf.RepeatedFieldBuilder<
16966           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_;
16967 
16968       /**
16969        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
16970        */
16971       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
16972         if (cfTimeRangeBuilder_ == null) {
16973           return java.util.Collections.unmodifiableList(cfTimeRange_);
16974         } else {
16975           return cfTimeRangeBuilder_.getMessageList();
16976         }
16977       }
16978       /**
16979        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
16980        */
16981       public int getCfTimeRangeCount() {
16982         if (cfTimeRangeBuilder_ == null) {
16983           return cfTimeRange_.size();
16984         } else {
16985           return cfTimeRangeBuilder_.getCount();
16986         }
16987       }
16988       /**
16989        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
16990        */
16991       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
16992         if (cfTimeRangeBuilder_ == null) {
16993           return cfTimeRange_.get(index);
16994         } else {
16995           return cfTimeRangeBuilder_.getMessage(index);
16996         }
16997       }
16998       /**
16999        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17000        */
17001       public Builder setCfTimeRange(
17002           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
17003         if (cfTimeRangeBuilder_ == null) {
17004           if (value == null) {
17005             throw new NullPointerException();
17006           }
17007           ensureCfTimeRangeIsMutable();
17008           cfTimeRange_.set(index, value);
17009           onChanged();
17010         } else {
17011           cfTimeRangeBuilder_.setMessage(index, value);
17012         }
17013         return this;
17014       }
17015       /**
17016        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17017        */
17018       public Builder setCfTimeRange(
17019           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
17020         if (cfTimeRangeBuilder_ == null) {
17021           ensureCfTimeRangeIsMutable();
17022           cfTimeRange_.set(index, builderForValue.build());
17023           onChanged();
17024         } else {
17025           cfTimeRangeBuilder_.setMessage(index, builderForValue.build());
17026         }
17027         return this;
17028       }
17029       /**
17030        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17031        */
17032       public Builder addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
17033         if (cfTimeRangeBuilder_ == null) {
17034           if (value == null) {
17035             throw new NullPointerException();
17036           }
17037           ensureCfTimeRangeIsMutable();
17038           cfTimeRange_.add(value);
17039           onChanged();
17040         } else {
17041           cfTimeRangeBuilder_.addMessage(value);
17042         }
17043         return this;
17044       }
17045       /**
17046        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17047        */
17048       public Builder addCfTimeRange(
17049           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) {
17050         if (cfTimeRangeBuilder_ == null) {
17051           if (value == null) {
17052             throw new NullPointerException();
17053           }
17054           ensureCfTimeRangeIsMutable();
17055           cfTimeRange_.add(index, value);
17056           onChanged();
17057         } else {
17058           cfTimeRangeBuilder_.addMessage(index, value);
17059         }
17060         return this;
17061       }
17062       /**
17063        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17064        */
17065       public Builder addCfTimeRange(
17066           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
17067         if (cfTimeRangeBuilder_ == null) {
17068           ensureCfTimeRangeIsMutable();
17069           cfTimeRange_.add(builderForValue.build());
17070           onChanged();
17071         } else {
17072           cfTimeRangeBuilder_.addMessage(builderForValue.build());
17073         }
17074         return this;
17075       }
17076       /**
17077        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17078        */
17079       public Builder addCfTimeRange(
17080           int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) {
17081         if (cfTimeRangeBuilder_ == null) {
17082           ensureCfTimeRangeIsMutable();
17083           cfTimeRange_.add(index, builderForValue.build());
17084           onChanged();
17085         } else {
17086           cfTimeRangeBuilder_.addMessage(index, builderForValue.build());
17087         }
17088         return this;
17089       }
17090       /**
17091        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17092        */
17093       public Builder addAllCfTimeRange(
17094           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) {
17095         if (cfTimeRangeBuilder_ == null) {
17096           ensureCfTimeRangeIsMutable();
17097           super.addAll(values, cfTimeRange_);
17098           onChanged();
17099         } else {
17100           cfTimeRangeBuilder_.addAllMessages(values);
17101         }
17102         return this;
17103       }
17104       /**
17105        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17106        */
17107       public Builder clearCfTimeRange() {
17108         if (cfTimeRangeBuilder_ == null) {
17109           cfTimeRange_ = java.util.Collections.emptyList();
17110           bitField0_ = (bitField0_ & ~0x00040000);
17111           onChanged();
17112         } else {
17113           cfTimeRangeBuilder_.clear();
17114         }
17115         return this;
17116       }
17117       /**
17118        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17119        */
17120       public Builder removeCfTimeRange(int index) {
17121         if (cfTimeRangeBuilder_ == null) {
17122           ensureCfTimeRangeIsMutable();
17123           cfTimeRange_.remove(index);
17124           onChanged();
17125         } else {
17126           cfTimeRangeBuilder_.remove(index);
17127         }
17128         return this;
17129       }
17130       /**
17131        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17132        */
17133       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder(
17134           int index) {
17135         return getCfTimeRangeFieldBuilder().getBuilder(index);
17136       }
17137       /**
17138        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17139        */
17140       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
17141           int index) {
17142         if (cfTimeRangeBuilder_ == null) {
17143           return cfTimeRange_.get(index);  } else {
17144           return cfTimeRangeBuilder_.getMessageOrBuilder(index);
17145         }
17146       }
17147       /**
17148        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17149        */
17150       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
17151            getCfTimeRangeOrBuilderList() {
17152         if (cfTimeRangeBuilder_ != null) {
17153           return cfTimeRangeBuilder_.getMessageOrBuilderList();
17154         } else {
17155           return java.util.Collections.unmodifiableList(cfTimeRange_);
17156         }
17157       }
17158       /**
17159        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17160        */
17161       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() {
17162         return getCfTimeRangeFieldBuilder().addBuilder(
17163             org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
17164       }
17165       /**
17166        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17167        */
17168       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder(
17169           int index) {
17170         return getCfTimeRangeFieldBuilder().addBuilder(
17171             index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance());
17172       }
17173       /**
17174        * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
17175        */
17176       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder>
17177            getCfTimeRangeBuilderList() {
17178         return getCfTimeRangeFieldBuilder().getBuilderList();
17179       }
17180       private com.google.protobuf.RepeatedFieldBuilder<
17181           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
17182           getCfTimeRangeFieldBuilder() {
17183         if (cfTimeRangeBuilder_ == null) {
17184           cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
17185               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>(
17186                   cfTimeRange_,
17187                   ((bitField0_ & 0x00040000) == 0x00040000),
17188                   getParentForChildren(),
17189                   isClean());
17190           cfTimeRange_ = null;
17191         }
17192         return cfTimeRangeBuilder_;
17193       }
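      /*
       * Illustrative sketch (editorial, not generated code): a Scan message combining
       * several of the scalar knobs defined in this builder. All setters are the ones
       * generated above; the values are hypothetical.
       *
       *   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan =
       *       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder()
       *           .setStartRow(com.google.protobuf.ByteString.copyFromUtf8("row-0000"))
       *           .setMaxVersions(3)                  // keep up to three cell versions
       *           .setCacheBlocks(false)              // do not pollute the block cache
       *           .setCaching(100)                    // rows per RPC, when honoured by the server
       *           .setMaxResultSize(2 * 1024 * 1024)  // byte budget per response
       *           .setReversed(true)                  // walk the rows backwards
       *           .build();
       */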
17194 
17195       // @@protoc_insertion_point(builder_scope:Scan)
17196     }
17197 
17198     static {
17199       defaultInstance = new Scan(true);
17200       defaultInstance.initFields();
17201     }
17202 
17203     // @@protoc_insertion_point(class_scope:Scan)
17204   }
17205 
17206   public interface ScanRequestOrBuilder
17207       extends com.google.protobuf.MessageOrBuilder {
17208 
17209     // optional .RegionSpecifier region = 1;
17210     /**
17211      * <code>optional .RegionSpecifier region = 1;</code>
17212      */
17213     boolean hasRegion();
17214     /**
17215      * <code>optional .RegionSpecifier region = 1;</code>
17216      */
17217     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
17218     /**
17219      * <code>optional .RegionSpecifier region = 1;</code>
17220      */
17221     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
17222 
17223     // optional .Scan scan = 2;
17224     /**
17225      * <code>optional .Scan scan = 2;</code>
17226      */
17227     boolean hasScan();
17228     /**
17229      * <code>optional .Scan scan = 2;</code>
17230      */
17231     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
17232     /**
17233      * <code>optional .Scan scan = 2;</code>
17234      */
17235     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
17236 
17237     // optional uint64 scanner_id = 3;
17238     /**
17239      * <code>optional uint64 scanner_id = 3;</code>
17240      */
17241     boolean hasScannerId();
17242     /**
17243      * <code>optional uint64 scanner_id = 3;</code>
17244      */
17245     long getScannerId();
17246 
17247     // optional uint32 number_of_rows = 4;
17248     /**
17249      * <code>optional uint32 number_of_rows = 4;</code>
17250      */
17251     boolean hasNumberOfRows();
17252     /**
17253      * <code>optional uint32 number_of_rows = 4;</code>
17254      */
17255     int getNumberOfRows();
17256 
17257     // optional bool close_scanner = 5;
17258     /**
17259      * <code>optional bool close_scanner = 5;</code>
17260      */
17261     boolean hasCloseScanner();
17262     /**
17263      * <code>optional bool close_scanner = 5;</code>
17264      */
17265     boolean getCloseScanner();
17266 
17267     // optional uint64 next_call_seq = 6;
17268     /**
17269      * <code>optional uint64 next_call_seq = 6;</code>
17270      */
17271     boolean hasNextCallSeq();
17272     /**
17273      * <code>optional uint64 next_call_seq = 6;</code>
17274      */
17275     long getNextCallSeq();
17276 
17277     // optional bool client_handles_partials = 7;
17278     /**
17279      * <code>optional bool client_handles_partials = 7;</code>
17280      */
17281     boolean hasClientHandlesPartials();
17282     /**
17283      * <code>optional bool client_handles_partials = 7;</code>
17284      */
17285     boolean getClientHandlesPartials();
17286 
17287     // optional bool client_handles_heartbeats = 8;
17288     /**
17289      * <code>optional bool client_handles_heartbeats = 8;</code>
17290      */
17291     boolean hasClientHandlesHeartbeats();
17292     /**
17293      * <code>optional bool client_handles_heartbeats = 8;</code>
17294      */
17295     boolean getClientHandlesHeartbeats();
17296 
17297     // optional bool track_scan_metrics = 9;
17298     /**
17299      * <code>optional bool track_scan_metrics = 9;</code>
17300      */
17301     boolean hasTrackScanMetrics();
17302     /**
17303      * <code>optional bool track_scan_metrics = 9;</code>
17304      */
17305     boolean getTrackScanMetrics();
17306 
17307     // optional bool renew = 10 [default = false];
17308     /**
17309      * <code>optional bool renew = 10 [default = false];</code>
17310      */
17311     boolean hasRenew();
17312     /**
17313      * <code>optional bool renew = 10 [default = false];</code>
17314      */
17315     boolean getRenew();
17316   }
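  /*
   * Illustrative sketch (editorial, not generated code): the ScanRequest lifecycle that
   * the class below documents. The first request carries a region and a Scan; follow-up
   * requests reuse the scanner id returned by the server; the final request closes the
   * scanner. The setters assumed here are the standard generated Builder methods, which
   * are not shown in this excerpt, and region, scan and scannerId are hypothetical.
   *
   *   ScanRequest open = ScanRequest.newBuilder()
   *       .setRegion(region).setScan(scan).setNumberOfRows(100).build();
   *   ScanRequest next = ScanRequest.newBuilder()
   *       .setScannerId(scannerId).setNumberOfRows(100).setNextCallSeq(1).build();
   *   ScanRequest close = ScanRequest.newBuilder()
   *       .setScannerId(scannerId).setCloseScanner(true).build();
   */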
17317   /**
17318    * Protobuf type {@code ScanRequest}
17319    *
17320    * <pre>
17321    **
17322    * A scan request. Initially, it should specify a scan. Later on, you
17323    * can use the scanner id returned to fetch result batches with a different
17324    * scan request.
17325    *
17326    * The scanner will remain open if there are more results, and it's not
17327    * asked to be closed explicitly.
17328    *
17329    * You can fetch the results and ask the scanner to be closed to save
17330    * a trip if you are not interested in remaining results.
17331    * </pre>
17332    */
17333   public static final class ScanRequest extends
17334       com.google.protobuf.GeneratedMessage
17335       implements ScanRequestOrBuilder {
17336     // Use ScanRequest.newBuilder() to construct.
17337     private ScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
17338       super(builder);
17339       this.unknownFields = builder.getUnknownFields();
17340     }
17341     private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
17342 
17343     private static final ScanRequest defaultInstance;
17344     public static ScanRequest getDefaultInstance() {
17345       return defaultInstance;
17346     }
17347 
17348     public ScanRequest getDefaultInstanceForType() {
17349       return defaultInstance;
17350     }
17351 
17352     private final com.google.protobuf.UnknownFieldSet unknownFields;
17353     @java.lang.Override
17354     public final com.google.protobuf.UnknownFieldSet
17355         getUnknownFields() {
17356       return this.unknownFields;
17357     }
17358     private ScanRequest(
17359         com.google.protobuf.CodedInputStream input,
17360         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17361         throws com.google.protobuf.InvalidProtocolBufferException {
17362       initFields();
17363       int mutable_bitField0_ = 0;
17364       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
17365           com.google.protobuf.UnknownFieldSet.newBuilder();
17366       try {
17367         boolean done = false;
17368         while (!done) {
17369           int tag = input.readTag();
17370           switch (tag) {
17371             case 0:
17372               done = true;
17373               break;
17374             default: {
17375               if (!parseUnknownField(input, unknownFields,
17376                                      extensionRegistry, tag)) {
17377                 done = true;
17378               }
17379               break;
17380             }
17381             case 10: {
17382               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
17383               if (((bitField0_ & 0x00000001) == 0x00000001)) {
17384                 subBuilder = region_.toBuilder();
17385               }
17386               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
17387               if (subBuilder != null) {
17388                 subBuilder.mergeFrom(region_);
17389                 region_ = subBuilder.buildPartial();
17390               }
17391               bitField0_ |= 0x00000001;
17392               break;
17393             }
17394             case 18: {
17395               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
17396               if (((bitField0_ & 0x00000002) == 0x00000002)) {
17397                 subBuilder = scan_.toBuilder();
17398               }
17399               scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
17400               if (subBuilder != null) {
17401                 subBuilder.mergeFrom(scan_);
17402                 scan_ = subBuilder.buildPartial();
17403               }
17404               bitField0_ |= 0x00000002;
17405               break;
17406             }
17407             case 24: {
17408               bitField0_ |= 0x00000004;
17409               scannerId_ = input.readUInt64();
17410               break;
17411             }
17412             case 32: {
17413               bitField0_ |= 0x00000008;
17414               numberOfRows_ = input.readUInt32();
17415               break;
17416             }
17417             case 40: {
17418               bitField0_ |= 0x00000010;
17419               closeScanner_ = input.readBool();
17420               break;
17421             }
17422             case 48: {
17423               bitField0_ |= 0x00000020;
17424               nextCallSeq_ = input.readUInt64();
17425               break;
17426             }
17427             case 56: {
17428               bitField0_ |= 0x00000040;
17429               clientHandlesPartials_ = input.readBool();
17430               break;
17431             }
17432             case 64: {
17433               bitField0_ |= 0x00000080;
17434               clientHandlesHeartbeats_ = input.readBool();
17435               break;
17436             }
17437             case 72: {
17438               bitField0_ |= 0x00000100;
17439               trackScanMetrics_ = input.readBool();
17440               break;
17441             }
17442             case 80: {
17443               bitField0_ |= 0x00000200;
17444               renew_ = input.readBool();
17445               break;
17446             }
17447           }
17448         }
17449       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
17450         throw e.setUnfinishedMessage(this);
17451       } catch (java.io.IOException e) {
17452         throw new com.google.protobuf.InvalidProtocolBufferException(
17453             e.getMessage()).setUnfinishedMessage(this);
17454       } finally {
17455         this.unknownFields = unknownFields.build();
17456         makeExtensionsImmutable();
17457       }
17458     }
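    /*
     * Editorial note on the tag switch in the constructor above: each protobuf tag is
     * (field_number << 3) | wire_type, so case 10 is field 1 as a length-delimited
     * message ((1 << 3) | 2), case 24 is field 3 as a varint ((3 << 3) | 0), and so on
     * up to case 80 for field 10. For example:
     *
     *   int scannerIdTag = (3 << 3) | 0;   // == 24, the scanner_id case above
     */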
17459     public static final com.google.protobuf.Descriptors.Descriptor
17460         getDescriptor() {
17461       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
17462     }
17463 
17464     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17465         internalGetFieldAccessorTable() {
17466       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable
17467           .ensureFieldAccessorsInitialized(
17468               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
17469     }
17470 
17471     public static com.google.protobuf.Parser<ScanRequest> PARSER =
17472         new com.google.protobuf.AbstractParser<ScanRequest>() {
17473       public ScanRequest parsePartialFrom(
17474           com.google.protobuf.CodedInputStream input,
17475           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17476           throws com.google.protobuf.InvalidProtocolBufferException {
17477         return new ScanRequest(input, extensionRegistry);
17478       }
17479     };
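    /*
     * Illustrative sketch (editorial, not generated code): round-tripping a ScanRequest
     * through its wire form with the PARSER defined above. toByteArray() is the stock
     * protobuf serializer; request and bytes are hypothetical.
     *
     *   byte[] bytes = request.toByteArray();
     *   ScanRequest parsed = ScanRequest.PARSER.parseFrom(bytes);   // throws InvalidProtocolBufferException on bad input
     */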
17480 
17481     @java.lang.Override
17482     public com.google.protobuf.Parser<ScanRequest> getParserForType() {
17483       return PARSER;
17484     }
17485 
17486     private int bitField0_;
17487     // optional .RegionSpecifier region = 1;
17488     public static final int REGION_FIELD_NUMBER = 1;
17489     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
17490     /**
17491      * <code>optional .RegionSpecifier region = 1;</code>
17492      */
17493     public boolean hasRegion() {
17494       return ((bitField0_ & 0x00000001) == 0x00000001);
17495     }
17496     /**
17497      * <code>optional .RegionSpecifier region = 1;</code>
17498      */
17499     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
17500       return region_;
17501     }
17502     /**
17503      * <code>optional .RegionSpecifier region = 1;</code>
17504      */
17505     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
17506       return region_;
17507     }
17508 
17509     // optional .Scan scan = 2;
17510     public static final int SCAN_FIELD_NUMBER = 2;
17511     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
17512     /**
17513      * <code>optional .Scan scan = 2;</code>
17514      */
17515     public boolean hasScan() {
17516       return ((bitField0_ & 0x00000002) == 0x00000002);
17517     }
17518     /**
17519      * <code>optional .Scan scan = 2;</code>
17520      */
17521     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
17522       return scan_;
17523     }
17524     /**
17525      * <code>optional .Scan scan = 2;</code>
17526      */
17527     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
17528       return scan_;
17529     }
17530 
17531     // optional uint64 scanner_id = 3;
17532     public static final int SCANNER_ID_FIELD_NUMBER = 3;
17533     private long scannerId_;
17534     /**
17535      * <code>optional uint64 scanner_id = 3;</code>
17536      */
17537     public boolean hasScannerId() {
17538       return ((bitField0_ & 0x00000004) == 0x00000004);
17539     }
17540     /**
17541      * <code>optional uint64 scanner_id = 3;</code>
17542      */
17543     public long getScannerId() {
17544       return scannerId_;
17545     }
17546 
17547     // optional uint32 number_of_rows = 4;
17548     public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4;
17549     private int numberOfRows_;
17550     /**
17551      * <code>optional uint32 number_of_rows = 4;</code>
17552      */
17553     public boolean hasNumberOfRows() {
17554       return ((bitField0_ & 0x00000008) == 0x00000008);
17555     }
17556     /**
17557      * <code>optional uint32 number_of_rows = 4;</code>
17558      */
17559     public int getNumberOfRows() {
17560       return numberOfRows_;
17561     }
17562 
17563     // optional bool close_scanner = 5;
17564     public static final int CLOSE_SCANNER_FIELD_NUMBER = 5;
17565     private boolean closeScanner_;
17566     /**
17567      * <code>optional bool close_scanner = 5;</code>
17568      */
17569     public boolean hasCloseScanner() {
17570       return ((bitField0_ & 0x00000010) == 0x00000010);
17571     }
17572     /**
17573      * <code>optional bool close_scanner = 5;</code>
17574      */
17575     public boolean getCloseScanner() {
17576       return closeScanner_;
17577     }
17578 
17579     // optional uint64 next_call_seq = 6;
17580     public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6;
17581     private long nextCallSeq_;
17582     /**
17583      * <code>optional uint64 next_call_seq = 6;</code>
17584      */
17585     public boolean hasNextCallSeq() {
17586       return ((bitField0_ & 0x00000020) == 0x00000020);
17587     }
17588     /**
17589      * <code>optional uint64 next_call_seq = 6;</code>
17590      */
17591     public long getNextCallSeq() {
17592       return nextCallSeq_;
17593     }
17594 
17595     // optional bool client_handles_partials = 7;
17596     public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7;
17597     private boolean clientHandlesPartials_;
17598     /**
17599      * <code>optional bool client_handles_partials = 7;</code>
17600      */
17601     public boolean hasClientHandlesPartials() {
17602       return ((bitField0_ & 0x00000040) == 0x00000040);
17603     }
17604     /**
17605      * <code>optional bool client_handles_partials = 7;</code>
17606      */
17607     public boolean getClientHandlesPartials() {
17608       return clientHandlesPartials_;
17609     }
17610 
17611     // optional bool client_handles_heartbeats = 8;
17612     public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8;
17613     private boolean clientHandlesHeartbeats_;
17614     /**
17615      * <code>optional bool client_handles_heartbeats = 8;</code>
17616      */
17617     public boolean hasClientHandlesHeartbeats() {
17618       return ((bitField0_ & 0x00000080) == 0x00000080);
17619     }
17620     /**
17621      * <code>optional bool client_handles_heartbeats = 8;</code>
17622      */
17623     public boolean getClientHandlesHeartbeats() {
17624       return clientHandlesHeartbeats_;
17625     }
17626 
17627     // optional bool track_scan_metrics = 9;
17628     public static final int TRACK_SCAN_METRICS_FIELD_NUMBER = 9;
17629     private boolean trackScanMetrics_;
17630     /**
17631      * <code>optional bool track_scan_metrics = 9;</code>
17632      */
17633     public boolean hasTrackScanMetrics() {
17634       return ((bitField0_ & 0x00000100) == 0x00000100);
17635     }
17636     /**
17637      * <code>optional bool track_scan_metrics = 9;</code>
17638      */
17639     public boolean getTrackScanMetrics() {
17640       return trackScanMetrics_;
17641     }
17642 
17643     // optional bool renew = 10 [default = false];
17644     public static final int RENEW_FIELD_NUMBER = 10;
17645     private boolean renew_;
17646     /**
17647      * <code>optional bool renew = 10 [default = false];</code>
17648      */
17649     public boolean hasRenew() {
17650       return ((bitField0_ & 0x00000200) == 0x00000200);
17651     }
17652     /**
17653      * <code>optional bool renew = 10 [default = false];</code>
17654      */
17655     public boolean getRenew() {
17656       return renew_;
17657     }
17658 
17659     private void initFields() {
17660       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
17661       scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
17662       scannerId_ = 0L;
17663       numberOfRows_ = 0;
17664       closeScanner_ = false;
17665       nextCallSeq_ = 0L;
17666       clientHandlesPartials_ = false;
17667       clientHandlesHeartbeats_ = false;
17668       trackScanMetrics_ = false;
17669       renew_ = false;
17670     }
17671     private byte memoizedIsInitialized = -1;
17672     public final boolean isInitialized() {
17673       byte isInitialized = memoizedIsInitialized;
17674       if (isInitialized != -1) return isInitialized == 1;
17675 
17676       if (hasRegion()) {
17677         if (!getRegion().isInitialized()) {
17678           memoizedIsInitialized = 0;
17679           return false;
17680         }
17681       }
17682       if (hasScan()) {
17683         if (!getScan().isInitialized()) {
17684           memoizedIsInitialized = 0;
17685           return false;
17686         }
17687       }
17688       memoizedIsInitialized = 1;
17689       return true;
17690     }
17691 
17692     public void writeTo(com.google.protobuf.CodedOutputStream output)
17693                         throws java.io.IOException {
17694       getSerializedSize();
17695       if (((bitField0_ & 0x00000001) == 0x00000001)) {
17696         output.writeMessage(1, region_);
17697       }
17698       if (((bitField0_ & 0x00000002) == 0x00000002)) {
17699         output.writeMessage(2, scan_);
17700       }
17701       if (((bitField0_ & 0x00000004) == 0x00000004)) {
17702         output.writeUInt64(3, scannerId_);
17703       }
17704       if (((bitField0_ & 0x00000008) == 0x00000008)) {
17705         output.writeUInt32(4, numberOfRows_);
17706       }
17707       if (((bitField0_ & 0x00000010) == 0x00000010)) {
17708         output.writeBool(5, closeScanner_);
17709       }
17710       if (((bitField0_ & 0x00000020) == 0x00000020)) {
17711         output.writeUInt64(6, nextCallSeq_);
17712       }
17713       if (((bitField0_ & 0x00000040) == 0x00000040)) {
17714         output.writeBool(7, clientHandlesPartials_);
17715       }
17716       if (((bitField0_ & 0x00000080) == 0x00000080)) {
17717         output.writeBool(8, clientHandlesHeartbeats_);
17718       }
17719       if (((bitField0_ & 0x00000100) == 0x00000100)) {
17720         output.writeBool(9, trackScanMetrics_);
17721       }
17722       if (((bitField0_ & 0x00000200) == 0x00000200)) {
17723         output.writeBool(10, renew_);
17724       }
17725       getUnknownFields().writeTo(output);
17726     }
17727 
17728     private int memoizedSerializedSize = -1;
17729     public int getSerializedSize() {
17730       int size = memoizedSerializedSize;
17731       if (size != -1) return size;
17732 
17733       size = 0;
17734       if (((bitField0_ & 0x00000001) == 0x00000001)) {
17735         size += com.google.protobuf.CodedOutputStream
17736           .computeMessageSize(1, region_);
17737       }
17738       if (((bitField0_ & 0x00000002) == 0x00000002)) {
17739         size += com.google.protobuf.CodedOutputStream
17740           .computeMessageSize(2, scan_);
17741       }
17742       if (((bitField0_ & 0x00000004) == 0x00000004)) {
17743         size += com.google.protobuf.CodedOutputStream
17744           .computeUInt64Size(3, scannerId_);
17745       }
17746       if (((bitField0_ & 0x00000008) == 0x00000008)) {
17747         size += com.google.protobuf.CodedOutputStream
17748           .computeUInt32Size(4, numberOfRows_);
17749       }
17750       if (((bitField0_ & 0x00000010) == 0x00000010)) {
17751         size += com.google.protobuf.CodedOutputStream
17752           .computeBoolSize(5, closeScanner_);
17753       }
17754       if (((bitField0_ & 0x00000020) == 0x00000020)) {
17755         size += com.google.protobuf.CodedOutputStream
17756           .computeUInt64Size(6, nextCallSeq_);
17757       }
17758       if (((bitField0_ & 0x00000040) == 0x00000040)) {
17759         size += com.google.protobuf.CodedOutputStream
17760           .computeBoolSize(7, clientHandlesPartials_);
17761       }
17762       if (((bitField0_ & 0x00000080) == 0x00000080)) {
17763         size += com.google.protobuf.CodedOutputStream
17764           .computeBoolSize(8, clientHandlesHeartbeats_);
17765       }
17766       if (((bitField0_ & 0x00000100) == 0x00000100)) {
17767         size += com.google.protobuf.CodedOutputStream
17768           .computeBoolSize(9, trackScanMetrics_);
17769       }
17770       if (((bitField0_ & 0x00000200) == 0x00000200)) {
17771         size += com.google.protobuf.CodedOutputStream
17772           .computeBoolSize(10, renew_);
17773       }
17774       size += getUnknownFields().getSerializedSize();
17775       memoizedSerializedSize = size;
17776       return size;
17777     }
17778 
17779     private static final long serialVersionUID = 0L;
17780     @java.lang.Override
17781     protected java.lang.Object writeReplace()
17782         throws java.io.ObjectStreamException {
17783       return super.writeReplace();
17784     }
17785 
17786     @java.lang.Override
17787     public boolean equals(final java.lang.Object obj) {
17788       if (obj == this) {
17789        return true;
17790       }
17791       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) {
17792         return super.equals(obj);
17793       }
17794       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj;
17795 
17796       boolean result = true;
17797       result = result && (hasRegion() == other.hasRegion());
17798       if (hasRegion()) {
17799         result = result && getRegion()
17800             .equals(other.getRegion());
17801       }
17802       result = result && (hasScan() == other.hasScan());
17803       if (hasScan()) {
17804         result = result && getScan()
17805             .equals(other.getScan());
17806       }
17807       result = result && (hasScannerId() == other.hasScannerId());
17808       if (hasScannerId()) {
17809         result = result && (getScannerId()
17810             == other.getScannerId());
17811       }
17812       result = result && (hasNumberOfRows() == other.hasNumberOfRows());
17813       if (hasNumberOfRows()) {
17814         result = result && (getNumberOfRows()
17815             == other.getNumberOfRows());
17816       }
17817       result = result && (hasCloseScanner() == other.hasCloseScanner());
17818       if (hasCloseScanner()) {
17819         result = result && (getCloseScanner()
17820             == other.getCloseScanner());
17821       }
17822       result = result && (hasNextCallSeq() == other.hasNextCallSeq());
17823       if (hasNextCallSeq()) {
17824         result = result && (getNextCallSeq()
17825             == other.getNextCallSeq());
17826       }
17827       result = result && (hasClientHandlesPartials() == other.hasClientHandlesPartials());
17828       if (hasClientHandlesPartials()) {
17829         result = result && (getClientHandlesPartials()
17830             == other.getClientHandlesPartials());
17831       }
17832       result = result && (hasClientHandlesHeartbeats() == other.hasClientHandlesHeartbeats());
17833       if (hasClientHandlesHeartbeats()) {
17834         result = result && (getClientHandlesHeartbeats()
17835             == other.getClientHandlesHeartbeats());
17836       }
17837       result = result && (hasTrackScanMetrics() == other.hasTrackScanMetrics());
17838       if (hasTrackScanMetrics()) {
17839         result = result && (getTrackScanMetrics()
17840             == other.getTrackScanMetrics());
17841       }
17842       result = result && (hasRenew() == other.hasRenew());
17843       if (hasRenew()) {
17844         result = result && (getRenew()
17845             == other.getRenew());
17846       }
17847       result = result &&
17848           getUnknownFields().equals(other.getUnknownFields());
17849       return result;
17850     }
17851 
17852     private int memoizedHashCode = 0;
17853     @java.lang.Override
17854     public int hashCode() {
17855       if (memoizedHashCode != 0) {
17856         return memoizedHashCode;
17857       }
17858       int hash = 41;
17859       hash = (19 * hash) + getDescriptorForType().hashCode();
17860       if (hasRegion()) {
17861         hash = (37 * hash) + REGION_FIELD_NUMBER;
17862         hash = (53 * hash) + getRegion().hashCode();
17863       }
17864       if (hasScan()) {
17865         hash = (37 * hash) + SCAN_FIELD_NUMBER;
17866         hash = (53 * hash) + getScan().hashCode();
17867       }
17868       if (hasScannerId()) {
17869         hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
17870         hash = (53 * hash) + hashLong(getScannerId());
17871       }
17872       if (hasNumberOfRows()) {
17873         hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER;
17874         hash = (53 * hash) + getNumberOfRows();
17875       }
17876       if (hasCloseScanner()) {
17877         hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER;
17878         hash = (53 * hash) + hashBoolean(getCloseScanner());
17879       }
17880       if (hasNextCallSeq()) {
17881         hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER;
17882         hash = (53 * hash) + hashLong(getNextCallSeq());
17883       }
17884       if (hasClientHandlesPartials()) {
17885         hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER;
17886         hash = (53 * hash) + hashBoolean(getClientHandlesPartials());
17887       }
17888       if (hasClientHandlesHeartbeats()) {
17889         hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER;
17890         hash = (53 * hash) + hashBoolean(getClientHandlesHeartbeats());
17891       }
17892       if (hasTrackScanMetrics()) {
17893         hash = (37 * hash) + TRACK_SCAN_METRICS_FIELD_NUMBER;
17894         hash = (53 * hash) + hashBoolean(getTrackScanMetrics());
17895       }
17896       if (hasRenew()) {
17897         hash = (37 * hash) + RENEW_FIELD_NUMBER;
17898         hash = (53 * hash) + hashBoolean(getRenew());
17899       }
17900       hash = (29 * hash) + getUnknownFields().hashCode();
17901       memoizedHashCode = hash;
17902       return hash;
17903     }
17904 
17905     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17906         com.google.protobuf.ByteString data)
17907         throws com.google.protobuf.InvalidProtocolBufferException {
17908       return PARSER.parseFrom(data);
17909     }
17910     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17911         com.google.protobuf.ByteString data,
17912         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17913         throws com.google.protobuf.InvalidProtocolBufferException {
17914       return PARSER.parseFrom(data, extensionRegistry);
17915     }
17916     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data)
17917         throws com.google.protobuf.InvalidProtocolBufferException {
17918       return PARSER.parseFrom(data);
17919     }
17920     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17921         byte[] data,
17922         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17923         throws com.google.protobuf.InvalidProtocolBufferException {
17924       return PARSER.parseFrom(data, extensionRegistry);
17925     }
17926     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input)
17927         throws java.io.IOException {
17928       return PARSER.parseFrom(input);
17929     }
17930     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17931         java.io.InputStream input,
17932         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17933         throws java.io.IOException {
17934       return PARSER.parseFrom(input, extensionRegistry);
17935     }
17936     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input)
17937         throws java.io.IOException {
17938       return PARSER.parseDelimitedFrom(input);
17939     }
17940     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(
17941         java.io.InputStream input,
17942         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17943         throws java.io.IOException {
17944       return PARSER.parseDelimitedFrom(input, extensionRegistry);
17945     }
17946     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17947         com.google.protobuf.CodedInputStream input)
17948         throws java.io.IOException {
17949       return PARSER.parseFrom(input);
17950     }
17951     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
17952         com.google.protobuf.CodedInputStream input,
17953         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
17954         throws java.io.IOException {
17955       return PARSER.parseFrom(input, extensionRegistry);
17956     }
17957 
17958     public static Builder newBuilder() { return Builder.create(); }
17959     public Builder newBuilderForType() { return newBuilder(); }
17960     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) {
17961       return newBuilder().mergeFrom(prototype);
17962     }
17963     public Builder toBuilder() { return newBuilder(this); }
17964 
17965     @java.lang.Override
17966     protected Builder newBuilderForType(
17967         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
17968       Builder builder = new Builder(parent);
17969       return builder;
17970     }
17971     /**
17972      * Protobuf type {@code ScanRequest}
17973      *
17974      * <pre>
17975      **
17976      * A scan request. Initially, it should specify a scan. Later on, you
17977      * can use the scanner id returned to fetch result batches with a different
17978      * scan request.
17979      *
17980      * The scanner will remain open as long as there are more results and it is
17981      * not explicitly asked to be closed.
17982      *
17983      * If you are not interested in the remaining results, you can fetch results
17984      * and ask for the scanner to be closed in the same request, saving a trip.
17985      * </pre>
17986      */
17987     public static final class Builder extends
17988         com.google.protobuf.GeneratedMessage.Builder<Builder>
17989        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder {
17990       public static final com.google.protobuf.Descriptors.Descriptor
17991           getDescriptor() {
17992         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
17993       }
17994 
17995       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
17996           internalGetFieldAccessorTable() {
17997         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable
17998             .ensureFieldAccessorsInitialized(
17999                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
18000       }
18001 
18002       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder()
18003       private Builder() {
18004         maybeForceBuilderInitialization();
18005       }
18006 
18007       private Builder(
18008           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
18009         super(parent);
18010         maybeForceBuilderInitialization();
18011       }
18012       private void maybeForceBuilderInitialization() {
18013         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
18014           getRegionFieldBuilder();
18015           getScanFieldBuilder();
18016         }
18017       }
18018       private static Builder create() {
18019         return new Builder();
18020       }
18021 
18022       public Builder clear() {
18023         super.clear();
18024         if (regionBuilder_ == null) {
18025           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
18026         } else {
18027           regionBuilder_.clear();
18028         }
18029         bitField0_ = (bitField0_ & ~0x00000001);
18030         if (scanBuilder_ == null) {
18031           scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
18032         } else {
18033           scanBuilder_.clear();
18034         }
18035         bitField0_ = (bitField0_ & ~0x00000002);
18036         scannerId_ = 0L;
18037         bitField0_ = (bitField0_ & ~0x00000004);
18038         numberOfRows_ = 0;
18039         bitField0_ = (bitField0_ & ~0x00000008);
18040         closeScanner_ = false;
18041         bitField0_ = (bitField0_ & ~0x00000010);
18042         nextCallSeq_ = 0L;
18043         bitField0_ = (bitField0_ & ~0x00000020);
18044         clientHandlesPartials_ = false;
18045         bitField0_ = (bitField0_ & ~0x00000040);
18046         clientHandlesHeartbeats_ = false;
18047         bitField0_ = (bitField0_ & ~0x00000080);
18048         trackScanMetrics_ = false;
18049         bitField0_ = (bitField0_ & ~0x00000100);
18050         renew_ = false;
18051         bitField0_ = (bitField0_ & ~0x00000200);
18052         return this;
18053       }
18054 
18055       public Builder clone() {
18056         return create().mergeFrom(buildPartial());
18057       }
18058 
18059       public com.google.protobuf.Descriptors.Descriptor
18060           getDescriptorForType() {
18061         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
18062       }
18063 
18064       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() {
18065         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
18066       }
18067 
18068       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() {
18069         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial();
18070         if (!result.isInitialized()) {
18071           throw newUninitializedMessageException(result);
18072         }
18073         return result;
18074       }
18075 
18076       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() {
18077         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this);
18078         int from_bitField0_ = bitField0_;
18079         int to_bitField0_ = 0;
18080         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
18081           to_bitField0_ |= 0x00000001;
18082         }
18083         if (regionBuilder_ == null) {
18084           result.region_ = region_;
18085         } else {
18086           result.region_ = regionBuilder_.build();
18087         }
18088         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
18089           to_bitField0_ |= 0x00000002;
18090         }
18091         if (scanBuilder_ == null) {
18092           result.scan_ = scan_;
18093         } else {
18094           result.scan_ = scanBuilder_.build();
18095         }
18096         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
18097           to_bitField0_ |= 0x00000004;
18098         }
18099         result.scannerId_ = scannerId_;
18100         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
18101           to_bitField0_ |= 0x00000008;
18102         }
18103         result.numberOfRows_ = numberOfRows_;
18104         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
18105           to_bitField0_ |= 0x00000010;
18106         }
18107         result.closeScanner_ = closeScanner_;
18108         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
18109           to_bitField0_ |= 0x00000020;
18110         }
18111         result.nextCallSeq_ = nextCallSeq_;
18112         if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
18113           to_bitField0_ |= 0x00000040;
18114         }
18115         result.clientHandlesPartials_ = clientHandlesPartials_;
18116         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
18117           to_bitField0_ |= 0x00000080;
18118         }
18119         result.clientHandlesHeartbeats_ = clientHandlesHeartbeats_;
18120         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
18121           to_bitField0_ |= 0x00000100;
18122         }
18123         result.trackScanMetrics_ = trackScanMetrics_;
18124         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
18125           to_bitField0_ |= 0x00000200;
18126         }
18127         result.renew_ = renew_;
18128         result.bitField0_ = to_bitField0_;
18129         onBuilt();
18130         return result;
18131       }
18132 
18133       public Builder mergeFrom(com.google.protobuf.Message other) {
18134         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) {
18135           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other);
18136         } else {
18137           super.mergeFrom(other);
18138           return this;
18139         }
18140       }
18141 
18142       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) {
18143         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this;
18144         if (other.hasRegion()) {
18145           mergeRegion(other.getRegion());
18146         }
18147         if (other.hasScan()) {
18148           mergeScan(other.getScan());
18149         }
18150         if (other.hasScannerId()) {
18151           setScannerId(other.getScannerId());
18152         }
18153         if (other.hasNumberOfRows()) {
18154           setNumberOfRows(other.getNumberOfRows());
18155         }
18156         if (other.hasCloseScanner()) {
18157           setCloseScanner(other.getCloseScanner());
18158         }
18159         if (other.hasNextCallSeq()) {
18160           setNextCallSeq(other.getNextCallSeq());
18161         }
18162         if (other.hasClientHandlesPartials()) {
18163           setClientHandlesPartials(other.getClientHandlesPartials());
18164         }
18165         if (other.hasClientHandlesHeartbeats()) {
18166           setClientHandlesHeartbeats(other.getClientHandlesHeartbeats());
18167         }
18168         if (other.hasTrackScanMetrics()) {
18169           setTrackScanMetrics(other.getTrackScanMetrics());
18170         }
18171         if (other.hasRenew()) {
18172           setRenew(other.getRenew());
18173         }
18174         this.mergeUnknownFields(other.getUnknownFields());
18175         return this;
18176       }
18177 
18178       public final boolean isInitialized() {
18179         if (hasRegion()) {
18180           if (!getRegion().isInitialized()) {
18181 
18182             return false;
18183           }
18184         }
18185         if (hasScan()) {
18186           if (!getScan().isInitialized()) {
18187 
18188             return false;
18189           }
18190         }
18191         return true;
18192       }
18193 
18194       public Builder mergeFrom(
18195           com.google.protobuf.CodedInputStream input,
18196           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
18197           throws java.io.IOException {
18198         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null;
18199         try {
18200           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
18201         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
18202           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage();
18203           throw e;
18204         } finally {
18205           if (parsedMessage != null) {
18206             mergeFrom(parsedMessage);
18207           }
18208         }
18209         return this;
18210       }
18211       private int bitField0_;
18212 
18213       // optional .RegionSpecifier region = 1;
18214       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
18215       private com.google.protobuf.SingleFieldBuilder<
18216           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
18217       /**
18218        * <code>optional .RegionSpecifier region = 1;</code>
18219        */
18220       public boolean hasRegion() {
18221         return ((bitField0_ & 0x00000001) == 0x00000001);
18222       }
18223       /**
18224        * <code>optional .RegionSpecifier region = 1;</code>
18225        */
18226       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
18227         if (regionBuilder_ == null) {
18228           return region_;
18229         } else {
18230           return regionBuilder_.getMessage();
18231         }
18232       }
18233       /**
18234        * <code>optional .RegionSpecifier region = 1;</code>
18235        */
18236       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
18237         if (regionBuilder_ == null) {
18238           if (value == null) {
18239             throw new NullPointerException();
18240           }
18241           region_ = value;
18242           onChanged();
18243         } else {
18244           regionBuilder_.setMessage(value);
18245         }
18246         bitField0_ |= 0x00000001;
18247         return this;
18248       }
18249       /**
18250        * <code>optional .RegionSpecifier region = 1;</code>
18251        */
18252       public Builder setRegion(
18253           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
18254         if (regionBuilder_ == null) {
18255           region_ = builderForValue.build();
18256           onChanged();
18257         } else {
18258           regionBuilder_.setMessage(builderForValue.build());
18259         }
18260         bitField0_ |= 0x00000001;
18261         return this;
18262       }
18263       /**
18264        * <code>optional .RegionSpecifier region = 1;</code>
18265        */
18266       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
18267         if (regionBuilder_ == null) {
18268           if (((bitField0_ & 0x00000001) == 0x00000001) &&
18269               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
18270             region_ =
18271               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
18272           } else {
18273             region_ = value;
18274           }
18275           onChanged();
18276         } else {
18277           regionBuilder_.mergeFrom(value);
18278         }
18279         bitField0_ |= 0x00000001;
18280         return this;
18281       }
18282       /**
18283        * <code>optional .RegionSpecifier region = 1;</code>
18284        */
18285       public Builder clearRegion() {
18286         if (regionBuilder_ == null) {
18287           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
18288           onChanged();
18289         } else {
18290           regionBuilder_.clear();
18291         }
18292         bitField0_ = (bitField0_ & ~0x00000001);
18293         return this;
18294       }
18295       /**
18296        * <code>optional .RegionSpecifier region = 1;</code>
18297        */
18298       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
18299         bitField0_ |= 0x00000001;
18300         onChanged();
18301         return getRegionFieldBuilder().getBuilder();
18302       }
18303       /**
18304        * <code>optional .RegionSpecifier region = 1;</code>
18305        */
18306       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
18307         if (regionBuilder_ != null) {
18308           return regionBuilder_.getMessageOrBuilder();
18309         } else {
18310           return region_;
18311         }
18312       }
18313       /**
18314        * <code>optional .RegionSpecifier region = 1;</code>
18315        */
18316       private com.google.protobuf.SingleFieldBuilder<
18317           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
18318           getRegionFieldBuilder() {
18319         if (regionBuilder_ == null) {
18320           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
18321               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
18322                   region_,
18323                   getParentForChildren(),
18324                   isClean());
18325           region_ = null;
18326         }
18327         return regionBuilder_;
18328       }
18329 
18330       // optional .Scan scan = 2;
18331       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
18332       private com.google.protobuf.SingleFieldBuilder<
18333           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
18334       /**
18335        * <code>optional .Scan scan = 2;</code>
18336        */
18337       public boolean hasScan() {
18338         return ((bitField0_ & 0x00000002) == 0x00000002);
18339       }
18340       /**
18341        * <code>optional .Scan scan = 2;</code>
18342        */
18343       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
18344         if (scanBuilder_ == null) {
18345           return scan_;
18346         } else {
18347           return scanBuilder_.getMessage();
18348         }
18349       }
18350       /**
18351        * <code>optional .Scan scan = 2;</code>
18352        */
18353       public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
18354         if (scanBuilder_ == null) {
18355           if (value == null) {
18356             throw new NullPointerException();
18357           }
18358           scan_ = value;
18359           onChanged();
18360         } else {
18361           scanBuilder_.setMessage(value);
18362         }
18363         bitField0_ |= 0x00000002;
18364         return this;
18365       }
18366       /**
18367        * <code>optional .Scan scan = 2;</code>
18368        */
18369       public Builder setScan(
18370           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
18371         if (scanBuilder_ == null) {
18372           scan_ = builderForValue.build();
18373           onChanged();
18374         } else {
18375           scanBuilder_.setMessage(builderForValue.build());
18376         }
18377         bitField0_ |= 0x00000002;
18378         return this;
18379       }
18380       /**
18381        * <code>optional .Scan scan = 2;</code>
18382        */
18383       public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
18384         if (scanBuilder_ == null) {
18385           if (((bitField0_ & 0x00000002) == 0x00000002) &&
18386               scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
18387             scan_ =
18388               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
18389           } else {
18390             scan_ = value;
18391           }
18392           onChanged();
18393         } else {
18394           scanBuilder_.mergeFrom(value);
18395         }
18396         bitField0_ |= 0x00000002;
18397         return this;
18398       }
18399       /**
18400        * <code>optional .Scan scan = 2;</code>
18401        */
18402       public Builder clearScan() {
18403         if (scanBuilder_ == null) {
18404           scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
18405           onChanged();
18406         } else {
18407           scanBuilder_.clear();
18408         }
18409         bitField0_ = (bitField0_ & ~0x00000002);
18410         return this;
18411       }
18412       /**
18413        * <code>optional .Scan scan = 2;</code>
18414        */
18415       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
18416         bitField0_ |= 0x00000002;
18417         onChanged();
18418         return getScanFieldBuilder().getBuilder();
18419       }
18420       /**
18421        * <code>optional .Scan scan = 2;</code>
18422        */
18423       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
18424         if (scanBuilder_ != null) {
18425           return scanBuilder_.getMessageOrBuilder();
18426         } else {
18427           return scan_;
18428         }
18429       }
18430       /**
18431        * <code>optional .Scan scan = 2;</code>
18432        */
18433       private com.google.protobuf.SingleFieldBuilder<
18434           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
18435           getScanFieldBuilder() {
18436         if (scanBuilder_ == null) {
18437           scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
18438               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
18439                   scan_,
18440                   getParentForChildren(),
18441                   isClean());
18442           scan_ = null;
18443         }
18444         return scanBuilder_;
18445       }
18446 
18447       // optional uint64 scanner_id = 3;
18448       private long scannerId_ ;
18449       /**
18450        * <code>optional uint64 scanner_id = 3;</code>
18451        */
18452       public boolean hasScannerId() {
18453         return ((bitField0_ & 0x00000004) == 0x00000004);
18454       }
18455       /**
18456        * <code>optional uint64 scanner_id = 3;</code>
18457        */
18458       public long getScannerId() {
18459         return scannerId_;
18460       }
18461       /**
18462        * <code>optional uint64 scanner_id = 3;</code>
18463        */
18464       public Builder setScannerId(long value) {
18465         bitField0_ |= 0x00000004;
18466         scannerId_ = value;
18467         onChanged();
18468         return this;
18469       }
18470       /**
18471        * <code>optional uint64 scanner_id = 3;</code>
18472        */
18473       public Builder clearScannerId() {
18474         bitField0_ = (bitField0_ & ~0x00000004);
18475         scannerId_ = 0L;
18476         onChanged();
18477         return this;
18478       }
18479 
18480       // optional uint32 number_of_rows = 4;
18481       private int numberOfRows_ ;
18482       /**
18483        * <code>optional uint32 number_of_rows = 4;</code>
18484        */
18485       public boolean hasNumberOfRows() {
18486         return ((bitField0_ & 0x00000008) == 0x00000008);
18487       }
18488       /**
18489        * <code>optional uint32 number_of_rows = 4;</code>
18490        */
18491       public int getNumberOfRows() {
18492         return numberOfRows_;
18493       }
18494       /**
18495        * <code>optional uint32 number_of_rows = 4;</code>
18496        */
18497       public Builder setNumberOfRows(int value) {
18498         bitField0_ |= 0x00000008;
18499         numberOfRows_ = value;
18500         onChanged();
18501         return this;
18502       }
18503       /**
18504        * <code>optional uint32 number_of_rows = 4;</code>
18505        */
18506       public Builder clearNumberOfRows() {
18507         bitField0_ = (bitField0_ & ~0x00000008);
18508         numberOfRows_ = 0;
18509         onChanged();
18510         return this;
18511       }
18512 
18513       // optional bool close_scanner = 5;
18514       private boolean closeScanner_ ;
18515       /**
18516        * <code>optional bool close_scanner = 5;</code>
18517        */
18518       public boolean hasCloseScanner() {
18519         return ((bitField0_ & 0x00000010) == 0x00000010);
18520       }
18521       /**
18522        * <code>optional bool close_scanner = 5;</code>
18523        */
18524       public boolean getCloseScanner() {
18525         return closeScanner_;
18526       }
18527       /**
18528        * <code>optional bool close_scanner = 5;</code>
18529        */
18530       public Builder setCloseScanner(boolean value) {
18531         bitField0_ |= 0x00000010;
18532         closeScanner_ = value;
18533         onChanged();
18534         return this;
18535       }
18536       /**
18537        * <code>optional bool close_scanner = 5;</code>
18538        */
18539       public Builder clearCloseScanner() {
18540         bitField0_ = (bitField0_ & ~0x00000010);
18541         closeScanner_ = false;
18542         onChanged();
18543         return this;
18544       }
18545 
18546       // optional uint64 next_call_seq = 6;
18547       private long nextCallSeq_ ;
18548       /**
18549        * <code>optional uint64 next_call_seq = 6;</code>
18550        */
18551       public boolean hasNextCallSeq() {
18552         return ((bitField0_ & 0x00000020) == 0x00000020);
18553       }
18554       /**
18555        * <code>optional uint64 next_call_seq = 6;</code>
18556        */
18557       public long getNextCallSeq() {
18558         return nextCallSeq_;
18559       }
18560       /**
18561        * <code>optional uint64 next_call_seq = 6;</code>
18562        */
18563       public Builder setNextCallSeq(long value) {
18564         bitField0_ |= 0x00000020;
18565         nextCallSeq_ = value;
18566         onChanged();
18567         return this;
18568       }
18569       /**
18570        * <code>optional uint64 next_call_seq = 6;</code>
18571        */
18572       public Builder clearNextCallSeq() {
18573         bitField0_ = (bitField0_ & ~0x00000020);
18574         nextCallSeq_ = 0L;
18575         onChanged();
18576         return this;
18577       }
18578 
18579       // optional bool client_handles_partials = 7;
18580       private boolean clientHandlesPartials_ ;
18581       /**
18582        * <code>optional bool client_handles_partials = 7;</code>
18583        */
18584       public boolean hasClientHandlesPartials() {
18585         return ((bitField0_ & 0x00000040) == 0x00000040);
18586       }
18587       /**
18588        * <code>optional bool client_handles_partials = 7;</code>
18589        */
18590       public boolean getClientHandlesPartials() {
18591         return clientHandlesPartials_;
18592       }
18593       /**
18594        * <code>optional bool client_handles_partials = 7;</code>
18595        */
18596       public Builder setClientHandlesPartials(boolean value) {
18597         bitField0_ |= 0x00000040;
18598         clientHandlesPartials_ = value;
18599         onChanged();
18600         return this;
18601       }
18602       /**
18603        * <code>optional bool client_handles_partials = 7;</code>
18604        */
18605       public Builder clearClientHandlesPartials() {
18606         bitField0_ = (bitField0_ & ~0x00000040);
18607         clientHandlesPartials_ = false;
18608         onChanged();
18609         return this;
18610       }
18611 
18612       // optional bool client_handles_heartbeats = 8;
18613       private boolean clientHandlesHeartbeats_ ;
18614       /**
18615        * <code>optional bool client_handles_heartbeats = 8;</code>
18616        */
18617       public boolean hasClientHandlesHeartbeats() {
18618         return ((bitField0_ & 0x00000080) == 0x00000080);
18619       }
18620       /**
18621        * <code>optional bool client_handles_heartbeats = 8;</code>
18622        */
18623       public boolean getClientHandlesHeartbeats() {
18624         return clientHandlesHeartbeats_;
18625       }
18626       /**
18627        * <code>optional bool client_handles_heartbeats = 8;</code>
18628        */
18629       public Builder setClientHandlesHeartbeats(boolean value) {
18630         bitField0_ |= 0x00000080;
18631         clientHandlesHeartbeats_ = value;
18632         onChanged();
18633         return this;
18634       }
18635       /**
18636        * <code>optional bool client_handles_heartbeats = 8;</code>
18637        */
18638       public Builder clearClientHandlesHeartbeats() {
18639         bitField0_ = (bitField0_ & ~0x00000080);
18640         clientHandlesHeartbeats_ = false;
18641         onChanged();
18642         return this;
18643       }
18644 
18645       // optional bool track_scan_metrics = 9;
18646       private boolean trackScanMetrics_ ;
18647       /**
18648        * <code>optional bool track_scan_metrics = 9;</code>
18649        */
18650       public boolean hasTrackScanMetrics() {
18651         return ((bitField0_ & 0x00000100) == 0x00000100);
18652       }
18653       /**
18654        * <code>optional bool track_scan_metrics = 9;</code>
18655        */
18656       public boolean getTrackScanMetrics() {
18657         return trackScanMetrics_;
18658       }
18659       /**
18660        * <code>optional bool track_scan_metrics = 9;</code>
18661        */
18662       public Builder setTrackScanMetrics(boolean value) {
18663         bitField0_ |= 0x00000100;
18664         trackScanMetrics_ = value;
18665         onChanged();
18666         return this;
18667       }
18668       /**
18669        * <code>optional bool track_scan_metrics = 9;</code>
18670        */
18671       public Builder clearTrackScanMetrics() {
18672         bitField0_ = (bitField0_ & ~0x00000100);
18673         trackScanMetrics_ = false;
18674         onChanged();
18675         return this;
18676       }
18677 
18678       // optional bool renew = 10 [default = false];
18679       private boolean renew_ ;
18680       /**
18681        * <code>optional bool renew = 10 [default = false];</code>
18682        */
18683       public boolean hasRenew() {
18684         return ((bitField0_ & 0x00000200) == 0x00000200);
18685       }
18686       /**
18687        * <code>optional bool renew = 10 [default = false];</code>
18688        */
18689       public boolean getRenew() {
18690         return renew_;
18691       }
18692       /**
18693        * <code>optional bool renew = 10 [default = false];</code>
18694        */
18695       public Builder setRenew(boolean value) {
18696         bitField0_ |= 0x00000200;
18697         renew_ = value;
18698         onChanged();
18699         return this;
18700       }
18701       /**
18702        * <code>optional bool renew = 10 [default = false];</code>
18703        */
18704       public Builder clearRenew() {
18705         bitField0_ = (bitField0_ & ~0x00000200);
18706         renew_ = false;
18707         onChanged();
18708         return this;
18709       }
18710 
18711       // @@protoc_insertion_point(builder_scope:ScanRequest)
18712     }
18713 
18714     static {
18715       defaultInstance = new ScanRequest(true);
18716       defaultInstance.initFields();
18717     }
18718 
18719     // @@protoc_insertion_point(class_scope:ScanRequest)
18720   }
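  /*
   * Illustrative usage sketch (not part of the generated code): driving the scan protocol
   * described in the ScanRequest comment above with the generated Builder. The region and
   * scan variables are assumed to be already-built RegionSpecifier and Scan messages, and
   * the RPC call that ships each request to the region server is elided.
   *
   *   // First request: carries the Scan definition; the response hands back a scanner_id.
   *   ScanRequest open = ScanRequest.newBuilder()
   *       .setRegion(region)
   *       .setScan(scan)
   *       .setNumberOfRows(100)
   *       .build();
   *
   *   // Follow-up requests: only the scanner_id (plus next_call_seq bookkeeping) is needed.
   *   ScanRequest next = ScanRequest.newBuilder()
   *       .setScannerId(scannerId)
   *       .setNextCallSeq(callSeq)
   *       .setNumberOfRows(100)
   *       .build();
   *
   *   // Last request: ask for the scanner to be closed as well, saving an extra round trip.
   *   ScanRequest done = ScanRequest.newBuilder()
   *       .setScannerId(scannerId)
   *       .setCloseScanner(true)
   *       .build();
   */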
18721 
18722   public interface ScanResponseOrBuilder
18723       extends com.google.protobuf.MessageOrBuilder {
18724 
18725     // repeated uint32 cells_per_result = 1;
18726     /**
18727      * <code>repeated uint32 cells_per_result = 1;</code>
18728      *
18729      * <pre>
18730      * This field is filled in if we are doing cellblocks.  A cellblock is made up
18731      * of all Cells serialized out as one cellblock BUT responses from a server
18732      * have their Cells grouped by Result.  So that we can reconstitute the
18733      * Results on the client side, this field is a list of counts of Cells
18734      * in each Result that makes up the response.  For example, if this field
18735      * has 3, 3, 3 in it, then we know that on the client, we are to make
18736      * three Results of three Cells each.
18737      * </pre>
18738      */
18739     java.util.List<java.lang.Integer> getCellsPerResultList();
18740     /**
18741      * <code>repeated uint32 cells_per_result = 1;</code>
18742      *
18743      * <pre>
18744      * This field is filled in if we are doing cellblocks.  A cellblock is made up
18745      * of all Cells serialized out as one cellblock BUT responses from a server
18746      * have their Cells grouped by Result.  So that we can reconstitute the
18747      * Results on the client side, this field is a list of counts of Cells
18748      * in each Result that makes up the response.  For example, if this field
18749      * has 3, 3, 3 in it, then we know that on the client, we are to make
18750      * three Results of three Cells each.
18751      * </pre>
18752      */
18753     int getCellsPerResultCount();
18754     /**
18755      * <code>repeated uint32 cells_per_result = 1;</code>
18756      *
18757      * <pre>
18758      * This field is filled in if we are doing cellblocks.  A cellblock is made up
18759      * of all Cells serialized out as one cellblock BUT responses from a server
18760      * have their Cells grouped by Result.  So that we can reconstitute the
18761      * Results on the client side, this field is a list of counts of Cells
18762      * in each Result that makes up the response.  For example, if this field
18763      * has 3, 3, 3 in it, then we know that on the client, we are to make
18764      * three Results of three Cells each.
18765      * </pre>
18766      */
18767     int getCellsPerResult(int index);
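    /*
     * Illustrative sketch (not generated code): reconstituting Results on the client from a
     * decoded cellblock using these counts, as described in the comment above. Here
     * cellsFromCellBlock is an assumed flat, in-order list of Cells decoded from the
     * accompanying cellblock, and response is a ScanResponse.
     *
     *   int offset = 0;
     *   List<List<Cell>> perResultCells = new ArrayList<>();
     *   for (int i = 0; i < response.getCellsPerResultCount(); i++) {
     *     int count = response.getCellsPerResult(i);            // e.g. 3, 3, 3
     *     perResultCells.add(cellsFromCellBlock.subList(offset, offset + count));
     *     offset += count;                                      // next Result starts here
     *   }
     */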
18768 
18769     // optional uint64 scanner_id = 2;
18770     /**
18771      * <code>optional uint64 scanner_id = 2;</code>
18772      */
18773     boolean hasScannerId();
18774     /**
18775      * <code>optional uint64 scanner_id = 2;</code>
18776      */
18777     long getScannerId();
18778 
18779     // optional bool more_results = 3;
18780     /**
18781      * <code>optional bool more_results = 3;</code>
18782      */
18783     boolean hasMoreResults();
18784     /**
18785      * <code>optional bool more_results = 3;</code>
18786      */
18787     boolean getMoreResults();
18788 
18789     // optional uint32 ttl = 4;
18790     /**
18791      * <code>optional uint32 ttl = 4;</code>
18792      */
18793     boolean hasTtl();
18794     /**
18795      * <code>optional uint32 ttl = 4;</code>
18796      */
18797     int getTtl();
18798 
18799     // repeated .Result results = 5;
18800     /**
18801      * <code>repeated .Result results = 5;</code>
18802      *
18803      * <pre>
18804      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18805      * This field is mutually exclusive with cells_per_result (since the Cells will
18806      * be inside the pb'd Result)
18807      * </pre>
18808      */
18809     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>
getResultsList()18810         getResultsList();
18811     /**
18812      * <code>repeated .Result results = 5;</code>
18813      *
18814      * <pre>
18815      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18816      * This field is mutually exclusive with cells_per_result (since the Cells will
18817      * be inside the pb'd Result)
18818      * </pre>
18819      */
getResults(int index)18820     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index);
18821     /**
18822      * <code>repeated .Result results = 5;</code>
18823      *
18824      * <pre>
18825      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18826      * This field is mutually exclusive with cells_per_result (since the Cells will
18827      * be inside the pb'd Result)
18828      * </pre>
18829      */
getResultsCount()18830     int getResultsCount();
18831     /**
18832      * <code>repeated .Result results = 5;</code>
18833      *
18834      * <pre>
18835      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18836      * This field is mutually exclusive with cells_per_result (since the Cells will
18837      * be inside the pb'd Result)
18838      * </pre>
18839      */
18840     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultsOrBuilderList()18841         getResultsOrBuilderList();
18842     /**
18843      * <code>repeated .Result results = 5;</code>
18844      *
18845      * <pre>
18846      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
18847      * This field is mutually exclusive with cells_per_result (since the Cells will
18848      * be inside the pb'd Result)
18849      * </pre>
18850      */
getResultsOrBuilder( int index)18851     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
18852         int index);
18853 
18854     // optional bool stale = 6;
18855     /**
18856      * <code>optional bool stale = 6;</code>
18857      */
hasStale()18858     boolean hasStale();
18859     /**
18860      * <code>optional bool stale = 6;</code>
18861      */
getStale()18862     boolean getStale();
18863 
18864     // repeated bool partial_flag_per_result = 7;
18865     /**
18866      * <code>repeated bool partial_flag_per_result = 7;</code>
18867      *
18868      * <pre>
18869      * This field is filled in if we are doing cellblocks. In the event that a row
18870      * could not fit all of its cells into a single RPC chunk, the results will be
18871      * returned as partials, and reconstructed into a complete result on the client
18872      * side. This field is a list of flags indicating whether or not the result
18873      * that the cells belong to is a partial result. For example, if this field
18874      * has false, false, true in it, then we know that on the client side, we need to
18875      * make another RPC request since the last result was only a partial.
18876      * </pre>
18877      */
getPartialFlagPerResultList()18878     java.util.List<java.lang.Boolean> getPartialFlagPerResultList();
18879     /**
18880      * <code>repeated bool partial_flag_per_result = 7;</code>
18881      *
18882      * <pre>
18883      * This field is filled in if we are doing cellblocks. In the event that a row
18884      * could not fit all of its cells into a single RPC chunk, the results will be
18885      * returned as partials, and reconstructed into a complete result on the client
18886      * side. This field is a list of flags indicating whether or not the result
18887      * that the cells belong to is a partial result. For example, if this field
18888      * has false, false, true in it, then we know that on the client side, we need to
18889      * make another RPC request since the last result was only a partial.
18890      * </pre>
18891      */
getPartialFlagPerResultCount()18892     int getPartialFlagPerResultCount();
18893     /**
18894      * <code>repeated bool partial_flag_per_result = 7;</code>
18895      *
18896      * <pre>
18897      * This field is filled in if we are doing cellblocks. In the event that a row
18898      * could not fit all of its cells into a single RPC chunk, the results will be
18899      * returned as partials, and reconstructed into a complete result on the client
18900      * side. This field is a list of flags indicating whether or not the result
18901      * that the cells belong to is a partial result. For example, if this field
18902      * has false, false, true in it, then we know that on the client side, we need to
18903      * make another RPC request since the last result was only a partial.
18904      * </pre>
18905      */
getPartialFlagPerResult(int index)18906     boolean getPartialFlagPerResult(int index);
18907 
18908     // optional bool more_results_in_region = 8;
18909     /**
18910      * <code>optional bool more_results_in_region = 8;</code>
18911      *
18912      * <pre>
18913      * A server may choose to limit the number of results returned to the client for
18914      * reasons such as the size in bytes or quantity of results accumulated. This field
18915      * will be true when more results exist in the current region.
18916      * </pre>
18917      */
hasMoreResultsInRegion()18918     boolean hasMoreResultsInRegion();
18919     /**
18920      * <code>optional bool more_results_in_region = 8;</code>
18921      *
18922      * <pre>
18923      * A server may choose to limit the number of results returned to the client for
18924      * reasons such as the size in bytes or quantity of results accumulated. This field
18925      * will be true when more results exist in the current region.
18926      * </pre>
18927      */
getMoreResultsInRegion()18928     boolean getMoreResultsInRegion();
18929 
18930     // optional bool heartbeat_message = 9;
18931     /**
18932      * <code>optional bool heartbeat_message = 9;</code>
18933      *
18934      * <pre>
18935      * This field is filled in if the server is sending back a heartbeat message.
18936      * Heartbeat messages are sent back to the client to prevent the scanner from
18937      * timing out. Seeing a heartbeat message communicates to the Client that the
18938      * server would have continued to scan had the time limit not been reached.
18939      * </pre>
18940      */
hasHeartbeatMessage()18941     boolean hasHeartbeatMessage();
18942     /**
18943      * <code>optional bool heartbeat_message = 9;</code>
18944      *
18945      * <pre>
18946      * This field is filled in if the server is sending back a heartbeat message.
18947      * Heartbeat messages are sent back to the client to prevent the scanner from
18948      * timing out. Seeing a heartbeat message communicates to the Client that the
18949      * server would have continued to scan had the time limit not been reached.
18950      * </pre>
18951      */
getHeartbeatMessage()18952     boolean getHeartbeatMessage();
18953 
18954     // optional .ScanMetrics scan_metrics = 10;
18955     /**
18956      * <code>optional .ScanMetrics scan_metrics = 10;</code>
18957      *
18958      * <pre>
18959      * This field is filled in if the client has requested that scan metrics be tracked.
18960      * The metrics tracked here are sent back to the client to be tracked together with
18961      * the existing client side metrics.
18962      * </pre>
18963      */
hasScanMetrics()18964     boolean hasScanMetrics();
18965     /**
18966      * <code>optional .ScanMetrics scan_metrics = 10;</code>
18967      *
18968      * <pre>
18969      * This field is filled in if the client has requested that scan metrics be tracked.
18970      * The metrics tracked here are sent back to the client to be tracked together with
18971      * the existing client side metrics.
18972      * </pre>
18973      */
getScanMetrics()18974     org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics();
18975     /**
18976      * <code>optional .ScanMetrics scan_metrics = 10;</code>
18977      *
18978      * <pre>
18979      * This field is filled in if the client has requested that scan metrics be tracked.
18980      * The metrics tracked here are sent back to the client to be tracked together with
18981      * the existing client side metrics.
18982      * </pre>
18983      */
getScanMetricsOrBuilder()18984     org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder();
18985   }
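  // Editor's note: an illustrative sketch, not part of the generated code. It shows how a
  // client could use cells_per_result from a ScanResponse to regroup a flat list of Cells
  // (already decoded from the accompanying cellblock) into one group per Result; the CELL
  // type parameter and the flat cell list are assumed inputs supplied by the RPC layer.
  static <CELL> java.util.List<java.util.List<CELL>> groupCellsPerResult(
      java.util.List<java.lang.Integer> cellsPerResult,
      java.util.List<CELL> flatCells) {
    java.util.List<java.util.List<CELL>> grouped =
        new java.util.ArrayList<java.util.List<CELL>>(cellsPerResult.size());
    int offset = 0;
    for (int count : cellsPerResult) {
      // Each entry is the number of consecutive Cells belonging to the next Result,
      // e.g. counts of 3, 3, 3 mean three Results of three Cells each.
      grouped.add(new java.util.ArrayList<CELL>(flatCells.subList(offset, offset + count)));
      offset += count;
    }
    return grouped;
  }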
18986   /**
18987    * Protobuf type {@code ScanResponse}
18988    *
18989    * <pre>
18990    **
18991    * The scan response. If there are no more results, more_results will
18992    * be false.  If it is not specified, it means there are more.
18993    * </pre>
18994    */
18995   public static final class ScanResponse extends
18996       com.google.protobuf.GeneratedMessage
18997       implements ScanResponseOrBuilder {
18998     // Use ScanResponse.newBuilder() to construct.
ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)18999     private ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
19000       super(builder);
19001       this.unknownFields = builder.getUnknownFields();
19002     }
ScanResponse(boolean noInit)19003     private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
19004 
19005     private static final ScanResponse defaultInstance;
getDefaultInstance()19006     public static ScanResponse getDefaultInstance() {
19007       return defaultInstance;
19008     }
19009 
getDefaultInstanceForType()19010     public ScanResponse getDefaultInstanceForType() {
19011       return defaultInstance;
19012     }
19013 
19014     private final com.google.protobuf.UnknownFieldSet unknownFields;
19015     @java.lang.Override
19016     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()19017         getUnknownFields() {
19018       return this.unknownFields;
19019     }
ScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19020     private ScanResponse(
19021         com.google.protobuf.CodedInputStream input,
19022         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19023         throws com.google.protobuf.InvalidProtocolBufferException {
19024       initFields();
19025       int mutable_bitField0_ = 0;
19026       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
19027           com.google.protobuf.UnknownFieldSet.newBuilder();
19028       try {
19029         boolean done = false;
19030         while (!done) {
19031           int tag = input.readTag();
19032           switch (tag) {
19033             case 0:
19034               done = true;
19035               break;
19036             default: {
19037               if (!parseUnknownField(input, unknownFields,
19038                                      extensionRegistry, tag)) {
19039                 done = true;
19040               }
19041               break;
19042             }
19043             case 8: {
19044               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
19045                 cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
19046                 mutable_bitField0_ |= 0x00000001;
19047               }
19048               cellsPerResult_.add(input.readUInt32());
19049               break;
19050             }
19051             case 10: {
19052               int length = input.readRawVarint32();
19053               int limit = input.pushLimit(length);
19054               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) {
19055                 cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>();
19056                 mutable_bitField0_ |= 0x00000001;
19057               }
19058               while (input.getBytesUntilLimit() > 0) {
19059                 cellsPerResult_.add(input.readUInt32());
19060               }
19061               input.popLimit(limit);
19062               break;
19063             }
19064             case 16: {
19065               bitField0_ |= 0x00000001;
19066               scannerId_ = input.readUInt64();
19067               break;
19068             }
19069             case 24: {
19070               bitField0_ |= 0x00000002;
19071               moreResults_ = input.readBool();
19072               break;
19073             }
19074             case 32: {
19075               bitField0_ |= 0x00000004;
19076               ttl_ = input.readUInt32();
19077               break;
19078             }
19079             case 42: {
19080               if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
19081                 results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>();
19082                 mutable_bitField0_ |= 0x00000010;
19083               }
19084               results_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry));
19085               break;
19086             }
19087             case 48: {
19088               bitField0_ |= 0x00000008;
19089               stale_ = input.readBool();
19090               break;
19091             }
19092             case 56: {
19093               if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
19094                 partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
19095                 mutable_bitField0_ |= 0x00000040;
19096               }
19097               partialFlagPerResult_.add(input.readBool());
19098               break;
19099             }
19100             case 58: {
19101               int length = input.readRawVarint32();
19102               int limit = input.pushLimit(length);
19103               if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) {
19104                 partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>();
19105                 mutable_bitField0_ |= 0x00000040;
19106               }
19107               while (input.getBytesUntilLimit() > 0) {
19108                 partialFlagPerResult_.add(input.readBool());
19109               }
19110               input.popLimit(limit);
19111               break;
19112             }
19113             case 64: {
19114               bitField0_ |= 0x00000010;
19115               moreResultsInRegion_ = input.readBool();
19116               break;
19117             }
19118             case 72: {
19119               bitField0_ |= 0x00000020;
19120               heartbeatMessage_ = input.readBool();
19121               break;
19122             }
19123             case 82: {
19124               org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder subBuilder = null;
19125               if (((bitField0_ & 0x00000040) == 0x00000040)) {
19126                 subBuilder = scanMetrics_.toBuilder();
19127               }
19128               scanMetrics_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.PARSER, extensionRegistry);
19129               if (subBuilder != null) {
19130                 subBuilder.mergeFrom(scanMetrics_);
19131                 scanMetrics_ = subBuilder.buildPartial();
19132               }
19133               bitField0_ |= 0x00000040;
19134               break;
19135             }
19136           }
19137         }
19138       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
19139         throw e.setUnfinishedMessage(this);
19140       } catch (java.io.IOException e) {
19141         throw new com.google.protobuf.InvalidProtocolBufferException(
19142             e.getMessage()).setUnfinishedMessage(this);
19143       } finally {
19144         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
19145           cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
19146         }
19147         if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) {
19148           results_ = java.util.Collections.unmodifiableList(results_);
19149         }
19150         if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) {
19151           partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
19152         }
19153         this.unknownFields = unknownFields.build();
19154         makeExtensionsImmutable();
19155       }
19156     }
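    // Editor's note (illustrative, protobuf wire format): each case label in the parsing
    // loop above is a wire tag computed as (field_number << 3) | wire_type. For example,
    // cells_per_result (field 1, uint32) shows up both as tag 8 = (1 << 3) | 0 when the
    // values arrive as individual varints and as tag 10 = (1 << 3) | 2 when they arrive
    // packed inside a length-delimited block; scanner_id (field 2) is tag 16 = (2 << 3) | 0,
    // results (field 5, message) is tag 42 = (5 << 3) | 2, and scan_metrics (field 10,
    // message) is tag 82 = (10 << 3) | 2.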
19157     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()19158         getDescriptor() {
19159       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor;
19160     }
19161 
19162     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()19163         internalGetFieldAccessorTable() {
19164       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable
19165           .ensureFieldAccessorsInitialized(
19166               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
19167     }
19168 
19169     public static com.google.protobuf.Parser<ScanResponse> PARSER =
19170         new com.google.protobuf.AbstractParser<ScanResponse>() {
19171       public ScanResponse parsePartialFrom(
19172           com.google.protobuf.CodedInputStream input,
19173           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19174           throws com.google.protobuf.InvalidProtocolBufferException {
19175         return new ScanResponse(input, extensionRegistry);
19176       }
19177     };
19178 
19179     @java.lang.Override
getParserForType()19180     public com.google.protobuf.Parser<ScanResponse> getParserForType() {
19181       return PARSER;
19182     }
19183 
19184     private int bitField0_;
19185     // repeated uint32 cells_per_result = 1;
19186     public static final int CELLS_PER_RESULT_FIELD_NUMBER = 1;
19187     private java.util.List<java.lang.Integer> cellsPerResult_;
19188     /**
19189      * <code>repeated uint32 cells_per_result = 1;</code>
19190      *
19191      * <pre>
19192      * This field is filled in if we are doing cellblocks.  A cellblock is made up
19193      * of all Cells serialized out as one cellblock BUT responses from a server
19194      * have their Cells grouped by Result.  So that we can reconstitute the
19195      * Results on the client side, this field is a list of counts of Cells
19196      * in each Result that makes up the response.  For example, if this field
19197      * has 3, 3, 3 in it, then we know that on the client, we are to make
19198      * three Results of three Cells each.
19199      * </pre>
19200      */
19201     public java.util.List<java.lang.Integer>
getCellsPerResultList()19202         getCellsPerResultList() {
19203       return cellsPerResult_;
19204     }
19205     /**
19206      * <code>repeated uint32 cells_per_result = 1;</code>
19207      *
19208      * <pre>
19209      * This field is filled in if we are doing cellblocks.  A cellblock is made up
19210      * of all Cells serialized out as one cellblock BUT responses from a server
19211      * have their Cells grouped by Result.  So that we can reconstitute the
19212      * Results on the client side, this field is a list of counts of Cells
19213      * in each Result that makes up the response.  For example, if this field
19214      * has 3, 3, 3 in it, then we know that on the client, we are to make
19215      * three Results of three Cells each.
19216      * </pre>
19217      */
getCellsPerResultCount()19218     public int getCellsPerResultCount() {
19219       return cellsPerResult_.size();
19220     }
19221     /**
19222      * <code>repeated uint32 cells_per_result = 1;</code>
19223      *
19224      * <pre>
19225      * This field is filled in if we are doing cellblocks.  A cellblock is made up
19226      * of all Cells serialized out as one cellblock BUT responses from a server
19227      * have their Cells grouped by Result.  So we can reconstitute the
19228      * Results on the client-side, this field is a list of counts of Cells
19229      * in each Result that makes up the response.  For example, if this field
19230      * has 3, 3, 3 in it, then we know that on the client, we are to make
19231      * three Results each of three Cells each.
19232      * </pre>
19233      */
getCellsPerResult(int index)19234     public int getCellsPerResult(int index) {
19235       return cellsPerResult_.get(index);
19236     }
19237 
19238     // optional uint64 scanner_id = 2;
19239     public static final int SCANNER_ID_FIELD_NUMBER = 2;
19240     private long scannerId_;
19241     /**
19242      * <code>optional uint64 scanner_id = 2;</code>
19243      */
hasScannerId()19244     public boolean hasScannerId() {
19245       return ((bitField0_ & 0x00000001) == 0x00000001);
19246     }
19247     /**
19248      * <code>optional uint64 scanner_id = 2;</code>
19249      */
getScannerId()19250     public long getScannerId() {
19251       return scannerId_;
19252     }
19253 
19254     // optional bool more_results = 3;
19255     public static final int MORE_RESULTS_FIELD_NUMBER = 3;
19256     private boolean moreResults_;
19257     /**
19258      * <code>optional bool more_results = 3;</code>
19259      */
hasMoreResults()19260     public boolean hasMoreResults() {
19261       return ((bitField0_ & 0x00000002) == 0x00000002);
19262     }
19263     /**
19264      * <code>optional bool more_results = 3;</code>
19265      */
getMoreResults()19266     public boolean getMoreResults() {
19267       return moreResults_;
19268     }
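    // Editor's note: an illustrative helper, not part of the generated API. It encodes the
    // rule from the ScanResponse javadoc: a more_results explicitly set to false means the
    // scan is exhausted, while an unset more_results means there are more results to fetch.
    boolean scanHasMoreResults() {
      return !hasMoreResults() || getMoreResults();
    }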
19269 
19270     // optional uint32 ttl = 4;
19271     public static final int TTL_FIELD_NUMBER = 4;
19272     private int ttl_;
19273     /**
19274      * <code>optional uint32 ttl = 4;</code>
19275      */
hasTtl()19276     public boolean hasTtl() {
19277       return ((bitField0_ & 0x00000004) == 0x00000004);
19278     }
19279     /**
19280      * <code>optional uint32 ttl = 4;</code>
19281      */
getTtl()19282     public int getTtl() {
19283       return ttl_;
19284     }
19285 
19286     // repeated .Result results = 5;
19287     public static final int RESULTS_FIELD_NUMBER = 5;
19288     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_;
19289     /**
19290      * <code>repeated .Result results = 5;</code>
19291      *
19292      * <pre>
19293      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19294      * This field is mutually exclusive with cells_per_result (since the Cells will
19295      * be inside the pb'd Result)
19296      * </pre>
19297      */
getResultsList()19298     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
19299       return results_;
19300     }
19301     /**
19302      * <code>repeated .Result results = 5;</code>
19303      *
19304      * <pre>
19305      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19306      * This field is mutually exclusive with cells_per_result (since the Cells will
19307      * be inside the pb'd Result)
19308      * </pre>
19309      */
19310     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultsOrBuilderList()19311         getResultsOrBuilderList() {
19312       return results_;
19313     }
19314     /**
19315      * <code>repeated .Result results = 5;</code>
19316      *
19317      * <pre>
19318      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19319      * This field is mutually exclusive with cells_per_result (since the Cells will
19320      * be inside the pb'd Result)
19321      * </pre>
19322      */
getResultsCount()19323     public int getResultsCount() {
19324       return results_.size();
19325     }
19326     /**
19327      * <code>repeated .Result results = 5;</code>
19328      *
19329      * <pre>
19330      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19331      * This field is mutually exclusive with cells_per_result (since the Cells will
19332      * be inside the pb'd Result)
19333      * </pre>
19334      */
getResults(int index)19335     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
19336       return results_.get(index);
19337     }
19338     /**
19339      * <code>repeated .Result results = 5;</code>
19340      *
19341      * <pre>
19342      * If cells are not carried in an accompanying cellblock, then they are pb'd here.
19343      * This field is mutually exclusive with cells_per_result (since the Cells will
19344      * be inside the pb'd Result)
19345      * </pre>
19346      */
getResultsOrBuilder( int index)19347     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
19348         int index) {
19349       return results_.get(index);
19350     }
19351 
19352     // optional bool stale = 6;
19353     public static final int STALE_FIELD_NUMBER = 6;
19354     private boolean stale_;
19355     /**
19356      * <code>optional bool stale = 6;</code>
19357      */
hasStale()19358     public boolean hasStale() {
19359       return ((bitField0_ & 0x00000008) == 0x00000008);
19360     }
19361     /**
19362      * <code>optional bool stale = 6;</code>
19363      */
getStale()19364     public boolean getStale() {
19365       return stale_;
19366     }
19367 
19368     // repeated bool partial_flag_per_result = 7;
19369     public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7;
19370     private java.util.List<java.lang.Boolean> partialFlagPerResult_;
19371     /**
19372      * <code>repeated bool partial_flag_per_result = 7;</code>
19373      *
19374      * <pre>
19375      * This field is filled in if we are doing cellblocks. In the event that a row
19376      * could not fit all of its cells into a single RPC chunk, the results will be
19377      * returned as partials, and reconstructed into a complete result on the client
19378      * side. This field is a list of flags indicating whether or not the result
19379      * that the cells belong to is a partial result. For example, if this field
19380      * has false, false, true in it, then we know that on the client side, we need to
19381      * make another RPC request since the last result was only a partial.
19382      * </pre>
19383      */
19384     public java.util.List<java.lang.Boolean>
getPartialFlagPerResultList()19385         getPartialFlagPerResultList() {
19386       return partialFlagPerResult_;
19387     }
19388     /**
19389      * <code>repeated bool partial_flag_per_result = 7;</code>
19390      *
19391      * <pre>
19392      * This field is filled in if we are doing cellblocks. In the event that a row
19393      * could not fit all of its cells into a single RPC chunk, the results will be
19394      * returned as partials, and reconstructed into a complete result on the client
19395      * side. This field is a list of flags indicating whether or not the result
19396      * that the cells belong to is a partial result. For example, if this field
19397      * has false, false, true in it, then we know that on the client side, we need to
19398      * make another RPC request since the last result was only a partial.
19399      * </pre>
19400      */
getPartialFlagPerResultCount()19401     public int getPartialFlagPerResultCount() {
19402       return partialFlagPerResult_.size();
19403     }
19404     /**
19405      * <code>repeated bool partial_flag_per_result = 7;</code>
19406      *
19407      * <pre>
19408      * This field is filled in if we are doing cellblocks. In the event that a row
19409      * could not fit all of its cells into a single RPC chunk, the results will be
19410      * returned as partials, and reconstructed into a complete result on the client
19411      * side. This field is a list of flags indicating whether or not the result
19412      * that the cells belong to is a partial result. For example, if this field
19413      * has false, false, true in it, then we know that on the client side, we need to
19414      * make another RPC request since the last result was only a partial.
19415      * </pre>
19416      */
getPartialFlagPerResult(int index)19417     public boolean getPartialFlagPerResult(int index) {
19418       return partialFlagPerResult_.get(index);
19419     }
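    // Editor's note: an illustrative helper, not generated code. The flags in
    // partial_flag_per_result line up one-to-one with the Results the client reconstitutes
    // from this response; a trailing true flag (e.g. false, false, true) means the last row
    // is incomplete and another scan RPC is needed to fetch the rest of that row before it
    // can be handed back to the caller.
    boolean endsWithPartialResult() {
      int flagCount = getPartialFlagPerResultCount();
      return flagCount > 0 && getPartialFlagPerResult(flagCount - 1);
    }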
19420 
19421     // optional bool more_results_in_region = 8;
19422     public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8;
19423     private boolean moreResultsInRegion_;
19424     /**
19425      * <code>optional bool more_results_in_region = 8;</code>
19426      *
19427      * <pre>
19428      * A server may choose to limit the number of results returned to the client for
19429      * reasons such as the size in bytes or quantity of results accumulated. This field
19430      * will be true when more results exist in the current region.
19431      * </pre>
19432      */
hasMoreResultsInRegion()19433     public boolean hasMoreResultsInRegion() {
19434       return ((bitField0_ & 0x00000010) == 0x00000010);
19435     }
19436     /**
19437      * <code>optional bool more_results_in_region = 8;</code>
19438      *
19439      * <pre>
19440      * A server may choose to limit the number of results returned to the client for
19441      * reasons such as the size in bytes or quantity of results accumulated. This field
19442      * will be true when more results exist in the current region.
19443      * </pre>
19444      */
getMoreResultsInRegion()19445     public boolean getMoreResultsInRegion() {
19446       return moreResultsInRegion_;
19447     }
19448 
19449     // optional bool heartbeat_message = 9;
19450     public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9;
19451     private boolean heartbeatMessage_;
19452     /**
19453      * <code>optional bool heartbeat_message = 9;</code>
19454      *
19455      * <pre>
19456      * This field is filled in if the server is sending back a heartbeat message.
19457      * Heartbeat messages are sent back to the client to prevent the scanner from
19458      * timing out. Seeing a heartbeat message communicates to the Client that the
19459      * server would have continued to scan had the time limit not been reached.
19460      * </pre>
19461      */
hasHeartbeatMessage()19462     public boolean hasHeartbeatMessage() {
19463       return ((bitField0_ & 0x00000020) == 0x00000020);
19464     }
19465     /**
19466      * <code>optional bool heartbeat_message = 9;</code>
19467      *
19468      * <pre>
19469      * This field is filled in if the server is sending back a heartbeat message.
19470      * Heartbeat messages are sent back to the client to prevent the scanner from
19471      * timing out. Seeing a heartbeat message communicates to the Client that the
19472      * server would have continued to scan had the time limit not been reached.
19473      * </pre>
19474      */
getHeartbeatMessage()19475     public boolean getHeartbeatMessage() {
19476       return heartbeatMessage_;
19477     }
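    // Editor's note: an illustrative helper, not part of the generated API. Per the field
    // comments above, a heartbeat response exists only to keep the scanner from timing out
    // when a time limit fired before results accumulated, so a client loop would typically
    // skip result handling for it and immediately issue the next ScanRequest.
    boolean isHeartbeatOnly() {
      return hasHeartbeatMessage() && getHeartbeatMessage()
          && getResultsCount() == 0 && getCellsPerResultCount() == 0;
    }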
19478 
19479     // optional .ScanMetrics scan_metrics = 10;
19480     public static final int SCAN_METRICS_FIELD_NUMBER = 10;
19481     private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_;
19482     /**
19483      * <code>optional .ScanMetrics scan_metrics = 10;</code>
19484      *
19485      * <pre>
19486      * This field is filled in if the client has requested that scan metrics be tracked.
19487      * The metrics tracked here are sent back to the client to be tracked together with
19488      * the existing client side metrics.
19489      * </pre>
19490      */
hasScanMetrics()19491     public boolean hasScanMetrics() {
19492       return ((bitField0_ & 0x00000040) == 0x00000040);
19493     }
19494     /**
19495      * <code>optional .ScanMetrics scan_metrics = 10;</code>
19496      *
19497      * <pre>
19498      * This field is filled in if the client has requested that scan metrics be tracked.
19499      * The metrics tracked here are sent back to the client to be tracked together with
19500      * the existing client side metrics.
19501      * </pre>
19502      */
getScanMetrics()19503     public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() {
19504       return scanMetrics_;
19505     }
19506     /**
19507      * <code>optional .ScanMetrics scan_metrics = 10;</code>
19508      *
19509      * <pre>
19510      * This field is filled in if the client has requested that scan metrics be tracked.
19511      * The metrics tracked here are sent back to the client to be tracked together with
19512      * the existing client side metrics.
19513      * </pre>
19514      */
getScanMetricsOrBuilder()19515     public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() {
19516       return scanMetrics_;
19517     }
19518 
initFields()19519     private void initFields() {
19520       cellsPerResult_ = java.util.Collections.emptyList();
19521       scannerId_ = 0L;
19522       moreResults_ = false;
19523       ttl_ = 0;
19524       results_ = java.util.Collections.emptyList();
19525       stale_ = false;
19526       partialFlagPerResult_ = java.util.Collections.emptyList();
19527       moreResultsInRegion_ = false;
19528       heartbeatMessage_ = false;
19529       scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
19530     }
19531     private byte memoizedIsInitialized = -1;
isInitialized()19532     public final boolean isInitialized() {
19533       byte isInitialized = memoizedIsInitialized;
19534       if (isInitialized != -1) return isInitialized == 1;
19535 
19536       memoizedIsInitialized = 1;
19537       return true;
19538     }
19539 
writeTo(com.google.protobuf.CodedOutputStream output)19540     public void writeTo(com.google.protobuf.CodedOutputStream output)
19541                         throws java.io.IOException {
19542       getSerializedSize();
19543       for (int i = 0; i < cellsPerResult_.size(); i++) {
19544         output.writeUInt32(1, cellsPerResult_.get(i));
19545       }
19546       if (((bitField0_ & 0x00000001) == 0x00000001)) {
19547         output.writeUInt64(2, scannerId_);
19548       }
19549       if (((bitField0_ & 0x00000002) == 0x00000002)) {
19550         output.writeBool(3, moreResults_);
19551       }
19552       if (((bitField0_ & 0x00000004) == 0x00000004)) {
19553         output.writeUInt32(4, ttl_);
19554       }
19555       for (int i = 0; i < results_.size(); i++) {
19556         output.writeMessage(5, results_.get(i));
19557       }
19558       if (((bitField0_ & 0x00000008) == 0x00000008)) {
19559         output.writeBool(6, stale_);
19560       }
19561       for (int i = 0; i < partialFlagPerResult_.size(); i++) {
19562         output.writeBool(7, partialFlagPerResult_.get(i));
19563       }
19564       if (((bitField0_ & 0x00000010) == 0x00000010)) {
19565         output.writeBool(8, moreResultsInRegion_);
19566       }
19567       if (((bitField0_ & 0x00000020) == 0x00000020)) {
19568         output.writeBool(9, heartbeatMessage_);
19569       }
19570       if (((bitField0_ & 0x00000040) == 0x00000040)) {
19571         output.writeMessage(10, scanMetrics_);
19572       }
19573       getUnknownFields().writeTo(output);
19574     }
19575 
19576     private int memoizedSerializedSize = -1;
getSerializedSize()19577     public int getSerializedSize() {
19578       int size = memoizedSerializedSize;
19579       if (size != -1) return size;
19580 
19581       size = 0;
19582       {
19583         int dataSize = 0;
19584         for (int i = 0; i < cellsPerResult_.size(); i++) {
19585           dataSize += com.google.protobuf.CodedOutputStream
19586             .computeUInt32SizeNoTag(cellsPerResult_.get(i));
19587         }
19588         size += dataSize;
19589         size += 1 * getCellsPerResultList().size();
19590       }
19591       if (((bitField0_ & 0x00000001) == 0x00000001)) {
19592         size += com.google.protobuf.CodedOutputStream
19593           .computeUInt64Size(2, scannerId_);
19594       }
19595       if (((bitField0_ & 0x00000002) == 0x00000002)) {
19596         size += com.google.protobuf.CodedOutputStream
19597           .computeBoolSize(3, moreResults_);
19598       }
19599       if (((bitField0_ & 0x00000004) == 0x00000004)) {
19600         size += com.google.protobuf.CodedOutputStream
19601           .computeUInt32Size(4, ttl_);
19602       }
19603       for (int i = 0; i < results_.size(); i++) {
19604         size += com.google.protobuf.CodedOutputStream
19605           .computeMessageSize(5, results_.get(i));
19606       }
19607       if (((bitField0_ & 0x00000008) == 0x00000008)) {
19608         size += com.google.protobuf.CodedOutputStream
19609           .computeBoolSize(6, stale_);
19610       }
19611       {
19612         int dataSize = 0;
19613         dataSize = 1 * getPartialFlagPerResultList().size();
19614         size += dataSize;
19615         size += 1 * getPartialFlagPerResultList().size();
19616       }
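      // Editor's note (illustrative): in the block above, each unpacked repeated bool costs
      // one byte for the value and one byte for the tag (tag 56 fits in a single varint
      // byte), hence the two "1 * size()" terms; a response carrying three partial flags
      // therefore adds 3 + 3 = 6 bytes here.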
19617       if (((bitField0_ & 0x00000010) == 0x00000010)) {
19618         size += com.google.protobuf.CodedOutputStream
19619           .computeBoolSize(8, moreResultsInRegion_);
19620       }
19621       if (((bitField0_ & 0x00000020) == 0x00000020)) {
19622         size += com.google.protobuf.CodedOutputStream
19623           .computeBoolSize(9, heartbeatMessage_);
19624       }
19625       if (((bitField0_ & 0x00000040) == 0x00000040)) {
19626         size += com.google.protobuf.CodedOutputStream
19627           .computeMessageSize(10, scanMetrics_);
19628       }
19629       size += getUnknownFields().getSerializedSize();
19630       memoizedSerializedSize = size;
19631       return size;
19632     }
19633 
19634     private static final long serialVersionUID = 0L;
19635     @java.lang.Override
writeReplace()19636     protected java.lang.Object writeReplace()
19637         throws java.io.ObjectStreamException {
19638       return super.writeReplace();
19639     }
19640 
19641     @java.lang.Override
equals(final java.lang.Object obj)19642     public boolean equals(final java.lang.Object obj) {
19643       if (obj == this) {
19644        return true;
19645       }
19646       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) {
19647         return super.equals(obj);
19648       }
19649       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj;
19650 
19651       boolean result = true;
19652       result = result && getCellsPerResultList()
19653           .equals(other.getCellsPerResultList());
19654       result = result && (hasScannerId() == other.hasScannerId());
19655       if (hasScannerId()) {
19656         result = result && (getScannerId()
19657             == other.getScannerId());
19658       }
19659       result = result && (hasMoreResults() == other.hasMoreResults());
19660       if (hasMoreResults()) {
19661         result = result && (getMoreResults()
19662             == other.getMoreResults());
19663       }
19664       result = result && (hasTtl() == other.hasTtl());
19665       if (hasTtl()) {
19666         result = result && (getTtl()
19667             == other.getTtl());
19668       }
19669       result = result && getResultsList()
19670           .equals(other.getResultsList());
19671       result = result && (hasStale() == other.hasStale());
19672       if (hasStale()) {
19673         result = result && (getStale()
19674             == other.getStale());
19675       }
19676       result = result && getPartialFlagPerResultList()
19677           .equals(other.getPartialFlagPerResultList());
19678       result = result && (hasMoreResultsInRegion() == other.hasMoreResultsInRegion());
19679       if (hasMoreResultsInRegion()) {
19680         result = result && (getMoreResultsInRegion()
19681             == other.getMoreResultsInRegion());
19682       }
19683       result = result && (hasHeartbeatMessage() == other.hasHeartbeatMessage());
19684       if (hasHeartbeatMessage()) {
19685         result = result && (getHeartbeatMessage()
19686             == other.getHeartbeatMessage());
19687       }
19688       result = result && (hasScanMetrics() == other.hasScanMetrics());
19689       if (hasScanMetrics()) {
19690         result = result && getScanMetrics()
19691             .equals(other.getScanMetrics());
19692       }
19693       result = result &&
19694           getUnknownFields().equals(other.getUnknownFields());
19695       return result;
19696     }
19697 
19698     private int memoizedHashCode = 0;
19699     @java.lang.Override
hashCode()19700     public int hashCode() {
19701       if (memoizedHashCode != 0) {
19702         return memoizedHashCode;
19703       }
19704       int hash = 41;
19705       hash = (19 * hash) + getDescriptorForType().hashCode();
19706       if (getCellsPerResultCount() > 0) {
19707         hash = (37 * hash) + CELLS_PER_RESULT_FIELD_NUMBER;
19708         hash = (53 * hash) + getCellsPerResultList().hashCode();
19709       }
19710       if (hasScannerId()) {
19711         hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
19712         hash = (53 * hash) + hashLong(getScannerId());
19713       }
19714       if (hasMoreResults()) {
19715         hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER;
19716         hash = (53 * hash) + hashBoolean(getMoreResults());
19717       }
19718       if (hasTtl()) {
19719         hash = (37 * hash) + TTL_FIELD_NUMBER;
19720         hash = (53 * hash) + getTtl();
19721       }
19722       if (getResultsCount() > 0) {
19723         hash = (37 * hash) + RESULTS_FIELD_NUMBER;
19724         hash = (53 * hash) + getResultsList().hashCode();
19725       }
19726       if (hasStale()) {
19727         hash = (37 * hash) + STALE_FIELD_NUMBER;
19728         hash = (53 * hash) + hashBoolean(getStale());
19729       }
19730       if (getPartialFlagPerResultCount() > 0) {
19731         hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER;
19732         hash = (53 * hash) + getPartialFlagPerResultList().hashCode();
19733       }
19734       if (hasMoreResultsInRegion()) {
19735         hash = (37 * hash) + MORE_RESULTS_IN_REGION_FIELD_NUMBER;
19736         hash = (53 * hash) + hashBoolean(getMoreResultsInRegion());
19737       }
19738       if (hasHeartbeatMessage()) {
19739         hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER;
19740         hash = (53 * hash) + hashBoolean(getHeartbeatMessage());
19741       }
19742       if (hasScanMetrics()) {
19743         hash = (37 * hash) + SCAN_METRICS_FIELD_NUMBER;
19744         hash = (53 * hash) + getScanMetrics().hashCode();
19745       }
19746       hash = (29 * hash) + getUnknownFields().hashCode();
19747       memoizedHashCode = hash;
19748       return hash;
19749     }
19750 
parseFrom( com.google.protobuf.ByteString data)19751     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
19752         com.google.protobuf.ByteString data)
19753         throws com.google.protobuf.InvalidProtocolBufferException {
19754       return PARSER.parseFrom(data);
19755     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19756     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
19757         com.google.protobuf.ByteString data,
19758         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19759         throws com.google.protobuf.InvalidProtocolBufferException {
19760       return PARSER.parseFrom(data, extensionRegistry);
19761     }
parseFrom(byte[] data)19762     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data)
19763         throws com.google.protobuf.InvalidProtocolBufferException {
19764       return PARSER.parseFrom(data);
19765     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19766     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
19767         byte[] data,
19768         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19769         throws com.google.protobuf.InvalidProtocolBufferException {
19770       return PARSER.parseFrom(data, extensionRegistry);
19771     }
parseFrom(java.io.InputStream input)19772     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input)
19773         throws java.io.IOException {
19774       return PARSER.parseFrom(input);
19775     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19776     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
19777         java.io.InputStream input,
19778         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19779         throws java.io.IOException {
19780       return PARSER.parseFrom(input, extensionRegistry);
19781     }
parseDelimitedFrom(java.io.InputStream input)19782     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input)
19783         throws java.io.IOException {
19784       return PARSER.parseDelimitedFrom(input);
19785     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19786     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseDelimitedFrom(
19787         java.io.InputStream input,
19788         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19789         throws java.io.IOException {
19790       return PARSER.parseDelimitedFrom(input, extensionRegistry);
19791     }
parseFrom( com.google.protobuf.CodedInputStream input)19792     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
19793         com.google.protobuf.CodedInputStream input)
19794         throws java.io.IOException {
19795       return PARSER.parseFrom(input);
19796     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19797     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(
19798         com.google.protobuf.CodedInputStream input,
19799         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
19800         throws java.io.IOException {
19801       return PARSER.parseFrom(input, extensionRegistry);
19802     }
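    // Editor's note: an illustrative usage sketch, not generated code. It round-trips a
    // ScanResponse through the byte[] overload above; toByteArray() comes from the protobuf
    // message base class, and addCellsPerResult/setScannerId/setMoreResults are assumed to
    // be the usual generated builder setters for these fields.
    static ScanResponse exampleRoundTrip()
        throws com.google.protobuf.InvalidProtocolBufferException {
      ScanResponse original = ScanResponse.newBuilder()
          .addCellsPerResult(3).addCellsPerResult(3).addCellsPerResult(3)
          .setScannerId(42L)
          .setMoreResults(true)
          .build();
      byte[] wire = original.toByteArray();
      return ScanResponse.parseFrom(wire);
    }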
19803 
newBuilder()19804     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()19805     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype)19806     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse prototype) {
19807       return newBuilder().mergeFrom(prototype);
19808     }
toBuilder()19809     public Builder toBuilder() { return newBuilder(this); }
19810 
19811     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)19812     protected Builder newBuilderForType(
19813         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19814       Builder builder = new Builder(parent);
19815       return builder;
19816     }
19817     /**
19818      * Protobuf type {@code ScanResponse}
19819      *
19820      * <pre>
19821      **
19822      * The scan response. If there are no more results, more_results will
19823      * be false.  If it is not specified, it means there are more.
19824      * </pre>
19825      */
19826     public static final class Builder extends
19827         com.google.protobuf.GeneratedMessage.Builder<Builder>
19828        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponseOrBuilder {
19829       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()19830           getDescriptor() {
19831         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor;
19832       }
19833 
19834       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()19835           internalGetFieldAccessorTable() {
19836         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable
19837             .ensureFieldAccessorsInitialized(
19838                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class);
19839       }
19840 
19841       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder()
Builder()19842       private Builder() {
19843         maybeForceBuilderInitialization();
19844       }
19845 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)19846       private Builder(
19847           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
19848         super(parent);
19849         maybeForceBuilderInitialization();
19850       }
maybeForceBuilderInitialization()19851       private void maybeForceBuilderInitialization() {
19852         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
19853           getResultsFieldBuilder();
19854           getScanMetricsFieldBuilder();
19855         }
19856       }
create()19857       private static Builder create() {
19858         return new Builder();
19859       }
19860 
clear()19861       public Builder clear() {
19862         super.clear();
19863         cellsPerResult_ = java.util.Collections.emptyList();
19864         bitField0_ = (bitField0_ & ~0x00000001);
19865         scannerId_ = 0L;
19866         bitField0_ = (bitField0_ & ~0x00000002);
19867         moreResults_ = false;
19868         bitField0_ = (bitField0_ & ~0x00000004);
19869         ttl_ = 0;
19870         bitField0_ = (bitField0_ & ~0x00000008);
19871         if (resultsBuilder_ == null) {
19872           results_ = java.util.Collections.emptyList();
19873           bitField0_ = (bitField0_ & ~0x00000010);
19874         } else {
19875           resultsBuilder_.clear();
19876         }
19877         stale_ = false;
19878         bitField0_ = (bitField0_ & ~0x00000020);
19879         partialFlagPerResult_ = java.util.Collections.emptyList();
19880         bitField0_ = (bitField0_ & ~0x00000040);
19881         moreResultsInRegion_ = false;
19882         bitField0_ = (bitField0_ & ~0x00000080);
19883         heartbeatMessage_ = false;
19884         bitField0_ = (bitField0_ & ~0x00000100);
19885         if (scanMetricsBuilder_ == null) {
19886           scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
19887         } else {
19888           scanMetricsBuilder_.clear();
19889         }
19890         bitField0_ = (bitField0_ & ~0x00000200);
19891         return this;
19892       }
19893 
clone()19894       public Builder clone() {
19895         return create().mergeFrom(buildPartial());
19896       }
19897 
19898       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()19899           getDescriptorForType() {
19900         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor;
19901       }
19902 
getDefaultInstanceForType()19903       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse getDefaultInstanceForType() {
19904         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
19905       }
19906 
build()19907       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse build() {
19908         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = buildPartial();
19909         if (!result.isInitialized()) {
19910           throw newUninitializedMessageException(result);
19911         }
19912         return result;
19913       }
19914 
buildPartial()19915       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse buildPartial() {
19916         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse(this);
19917         int from_bitField0_ = bitField0_;
19918         int to_bitField0_ = 0;
19919         if (((bitField0_ & 0x00000001) == 0x00000001)) {
19920           cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_);
19921           bitField0_ = (bitField0_ & ~0x00000001);
19922         }
19923         result.cellsPerResult_ = cellsPerResult_;
19924         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
19925           to_bitField0_ |= 0x00000001;
19926         }
19927         result.scannerId_ = scannerId_;
19928         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
19929           to_bitField0_ |= 0x00000002;
19930         }
19931         result.moreResults_ = moreResults_;
19932         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
19933           to_bitField0_ |= 0x00000004;
19934         }
19935         result.ttl_ = ttl_;
19936         if (resultsBuilder_ == null) {
19937           if (((bitField0_ & 0x00000010) == 0x00000010)) {
19938             results_ = java.util.Collections.unmodifiableList(results_);
19939             bitField0_ = (bitField0_ & ~0x00000010);
19940           }
19941           result.results_ = results_;
19942         } else {
19943           result.results_ = resultsBuilder_.build();
19944         }
19945         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
19946           to_bitField0_ |= 0x00000008;
19947         }
19948         result.stale_ = stale_;
19949         if (((bitField0_ & 0x00000040) == 0x00000040)) {
19950           partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_);
19951           bitField0_ = (bitField0_ & ~0x00000040);
19952         }
19953         result.partialFlagPerResult_ = partialFlagPerResult_;
19954         if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
19955           to_bitField0_ |= 0x00000010;
19956         }
19957         result.moreResultsInRegion_ = moreResultsInRegion_;
19958         if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
19959           to_bitField0_ |= 0x00000020;
19960         }
19961         result.heartbeatMessage_ = heartbeatMessage_;
19962         if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
19963           to_bitField0_ |= 0x00000040;
19964         }
19965         if (scanMetricsBuilder_ == null) {
19966           result.scanMetrics_ = scanMetrics_;
19967         } else {
19968           result.scanMetrics_ = scanMetricsBuilder_.build();
19969         }
19970         result.bitField0_ = to_bitField0_;
19971         onBuilt();
19972         return result;
19973       }
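      // Editor's note (illustrative): the builder above tracks all ten fields, so its
      // presence bits run 0x1 through 0x200, while the built message only keeps has-bits
      // for its seven optional fields (0x1 through 0x40); the three repeated fields need
      // no has-bit. That is why from_bitField0_ 0x2 maps to to_bitField0_ 0x1, 0x4 to 0x2,
      // and so on in buildPartial().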
19974 
mergeFrom(com.google.protobuf.Message other)19975       public Builder mergeFrom(com.google.protobuf.Message other) {
19976         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) {
19977           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)other);
19978         } else {
19979           super.mergeFrom(other);
19980           return this;
19981         }
19982       }
19983 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other)19984       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other) {
19985         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()) return this;
19986         if (!other.cellsPerResult_.isEmpty()) {
19987           if (cellsPerResult_.isEmpty()) {
19988             cellsPerResult_ = other.cellsPerResult_;
19989             bitField0_ = (bitField0_ & ~0x00000001);
19990           } else {
19991             ensureCellsPerResultIsMutable();
19992             cellsPerResult_.addAll(other.cellsPerResult_);
19993           }
19994           onChanged();
19995         }
19996         if (other.hasScannerId()) {
19997           setScannerId(other.getScannerId());
19998         }
19999         if (other.hasMoreResults()) {
20000           setMoreResults(other.getMoreResults());
20001         }
20002         if (other.hasTtl()) {
20003           setTtl(other.getTtl());
20004         }
20005         if (resultsBuilder_ == null) {
20006           if (!other.results_.isEmpty()) {
20007             if (results_.isEmpty()) {
20008               results_ = other.results_;
20009               bitField0_ = (bitField0_ & ~0x00000010);
20010             } else {
20011               ensureResultsIsMutable();
20012               results_.addAll(other.results_);
20013             }
20014             onChanged();
20015           }
20016         } else {
20017           if (!other.results_.isEmpty()) {
20018             if (resultsBuilder_.isEmpty()) {
20019               resultsBuilder_.dispose();
20020               resultsBuilder_ = null;
20021               results_ = other.results_;
20022               bitField0_ = (bitField0_ & ~0x00000010);
20023               resultsBuilder_ =
20024                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
20025                    getResultsFieldBuilder() : null;
20026             } else {
20027               resultsBuilder_.addAllMessages(other.results_);
20028             }
20029           }
20030         }
20031         if (other.hasStale()) {
20032           setStale(other.getStale());
20033         }
20034         if (!other.partialFlagPerResult_.isEmpty()) {
20035           if (partialFlagPerResult_.isEmpty()) {
20036             partialFlagPerResult_ = other.partialFlagPerResult_;
20037             bitField0_ = (bitField0_ & ~0x00000040);
20038           } else {
20039             ensurePartialFlagPerResultIsMutable();
20040             partialFlagPerResult_.addAll(other.partialFlagPerResult_);
20041           }
20042           onChanged();
20043         }
20044         if (other.hasMoreResultsInRegion()) {
20045           setMoreResultsInRegion(other.getMoreResultsInRegion());
20046         }
20047         if (other.hasHeartbeatMessage()) {
20048           setHeartbeatMessage(other.getHeartbeatMessage());
20049         }
20050         if (other.hasScanMetrics()) {
20051           mergeScanMetrics(other.getScanMetrics());
20052         }
20053         this.mergeUnknownFields(other.getUnknownFields());
20054         return this;
20055       }
20056 
20057       public final boolean isInitialized() {
20058         return true;
20059       }
20060 
20061       public Builder mergeFrom(
20062           com.google.protobuf.CodedInputStream input,
20063           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
20064           throws java.io.IOException {
20065         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parsedMessage = null;
20066         try {
20067           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
20068         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
20069           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) e.getUnfinishedMessage();
20070           throw e;
20071         } finally {
20072           if (parsedMessage != null) {
20073             mergeFrom(parsedMessage);
20074           }
20075         }
20076         return this;
20077       }
20078       private int bitField0_;
20079 
20080       // repeated uint32 cells_per_result = 1;
20081       private java.util.List<java.lang.Integer> cellsPerResult_ = java.util.Collections.emptyList();
20082       private void ensureCellsPerResultIsMutable() {
20083         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
20084           cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(cellsPerResult_);
20085           bitField0_ |= 0x00000001;
20086          }
20087       }
20088       /**
20089        * <code>repeated uint32 cells_per_result = 1;</code>
20090        *
20091        * <pre>
20092        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20093        * of all Cells serialized out as one cellblock BUT responses from a server
20094        * have their Cells grouped by Result.  So we can reconstitute the
20095        * Results on the client-side, this field is a list of counts of Cells
20096        * in each Result that makes up the response.  For example, if this field
20097        * has 3, 3, 3 in it, then we know that on the client, we are to make
20098        * three Results each of three Cells each.
20099        * </pre>
20100        */
20101       public java.util.List<java.lang.Integer>
20102           getCellsPerResultList() {
20103         return java.util.Collections.unmodifiableList(cellsPerResult_);
20104       }
20105       /**
20106        * <code>repeated uint32 cells_per_result = 1;</code>
20107        *
20108        * <pre>
20109        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20110        * of all Cells serialized out as one cellblock BUT responses from a server
20111        * have their Cells grouped by Result.  So we can reconstitute the
20112        * Results on the client-side, this field is a list of counts of Cells
20113        * in each Result that makes up the response.  For example, if this field
20114        * has 3, 3, 3 in it, then we know that on the client, we are to make
20115        * three Results each of three Cells each.
20116        * </pre>
20117        */
20118       public int getCellsPerResultCount() {
20119         return cellsPerResult_.size();
20120       }
20121       /**
20122        * <code>repeated uint32 cells_per_result = 1;</code>
20123        *
20124        * <pre>
20125        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20126        * of all Cells serialized out as one cellblock BUT responses from a server
20127        * have their Cells grouped by Result.  So we can reconstitute the
20128        * Results on the client-side, this field is a list of counts of Cells
20129        * in each Result that makes up the response.  For example, if this field
20130        * has 3, 3, 3 in it, then we know that on the client, we are to make
20131        * three Results each of three Cells each.
20132        * </pre>
20133        */
20134       public int getCellsPerResult(int index) {
20135         return cellsPerResult_.get(index);
20136       }
20137       /**
20138        * <code>repeated uint32 cells_per_result = 1;</code>
20139        *
20140        * <pre>
20141        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20142        * of all Cells serialized out as one cellblock BUT responses from a server
20143        * have their Cells grouped by Result.  So we can reconstitute the
20144        * Results on the client-side, this field is a list of counts of Cells
20145        * in each Result that makes up the response.  For example, if this field
20146        * has 3, 3, 3 in it, then we know that on the client, we are to make
20147        * three Results each of three Cells each.
20148        * </pre>
20149        */
20150       public Builder setCellsPerResult(
20151           int index, int value) {
20152         ensureCellsPerResultIsMutable();
20153         cellsPerResult_.set(index, value);
20154         onChanged();
20155         return this;
20156       }
20157       /**
20158        * <code>repeated uint32 cells_per_result = 1;</code>
20159        *
20160        * <pre>
20161        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20162        * of all Cells serialized out as one cellblock BUT responses from a server
20163        * have their Cells grouped by Result.  So we can reconstitute the
20164        * Results on the client-side, this field is a list of counts of Cells
20165        * in each Result that makes up the response.  For example, if this field
20166        * has 3, 3, 3 in it, then we know that on the client, we are to make
20167        * three Results each of three Cells each.
20168        * </pre>
20169        */
20170       public Builder addCellsPerResult(int value) {
20171         ensureCellsPerResultIsMutable();
20172         cellsPerResult_.add(value);
20173         onChanged();
20174         return this;
20175       }
20176       /**
20177        * <code>repeated uint32 cells_per_result = 1;</code>
20178        *
20179        * <pre>
20180        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20181        * of all Cells serialized out as one cellblock BUT responses from a server
20182        * have their Cells grouped by Result.  So we can reconstitute the
20183        * Results on the client-side, this field is a list of counts of Cells
20184        * in each Result that makes up the response.  For example, if this field
20185        * has 3, 3, 3 in it, then we know that on the client, we are to make
20186        * three Results each of three Cells each.
20187        * </pre>
20188        */
20189       public Builder addAllCellsPerResult(
20190           java.lang.Iterable<? extends java.lang.Integer> values) {
20191         ensureCellsPerResultIsMutable();
20192         super.addAll(values, cellsPerResult_);
20193         onChanged();
20194         return this;
20195       }
20196       /**
20197        * <code>repeated uint32 cells_per_result = 1;</code>
20198        *
20199        * <pre>
20200        * This field is filled in if we are doing cellblocks.  A cellblock is made up
20201        * of all Cells serialized out as one cellblock BUT responses from a server
20202        * have their Cells grouped by Result.  So we can reconstitute the
20203        * Results on the client-side, this field is a list of counts of Cells
20204        * in each Result that makes up the response.  For example, if this field
20205        * has 3, 3, 3 in it, then we know that on the client, we are to make
20206        * three Results each of three Cells each.
20207        * </pre>
20208        */
20209       public Builder clearCellsPerResult() {
20210         cellsPerResult_ = java.util.Collections.emptyList();
20211         bitField0_ = (bitField0_ & ~0x00000001);
20212         onChanged();
20213         return this;
20214       }
20215 
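      // -----------------------------------------------------------------
      // Illustrative sketch, not generated code: the cells_per_result counts
      // described above tell a client how to regroup Cells that travel in a
      // separate cellblock. A minimal consumer can sum the counts to learn
      // how many Cells it must read from the cellblock in total. The helper
      // name below is hypothetical and added purely for illustration.
      // -----------------------------------------------------------------
      private static int exampleTotalCellblockCells(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse response) {
        int total = 0;
        // One count per Result; e.g. 3, 3, 3 means three Results of three
        // Cells each, nine cellblock Cells overall.
        for (int i = 0; i < response.getCellsPerResultCount(); i++) {
          total += response.getCellsPerResult(i);
        }
        return total;
      }
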
20216       // optional uint64 scanner_id = 2;
20217       private long scannerId_ ;
20218       /**
20219        * <code>optional uint64 scanner_id = 2;</code>
20220        */
20221       public boolean hasScannerId() {
20222         return ((bitField0_ & 0x00000002) == 0x00000002);
20223       }
20224       /**
20225        * <code>optional uint64 scanner_id = 2;</code>
20226        */
20227       public long getScannerId() {
20228         return scannerId_;
20229       }
20230       /**
20231        * <code>optional uint64 scanner_id = 2;</code>
20232        */
20233       public Builder setScannerId(long value) {
20234         bitField0_ |= 0x00000002;
20235         scannerId_ = value;
20236         onChanged();
20237         return this;
20238       }
20239       /**
20240        * <code>optional uint64 scanner_id = 2;</code>
20241        */
20242       public Builder clearScannerId() {
20243         bitField0_ = (bitField0_ & ~0x00000002);
20244         scannerId_ = 0L;
20245         onChanged();
20246         return this;
20247       }
20248 
20249       // optional bool more_results = 3;
20250       private boolean moreResults_ ;
20251       /**
20252        * <code>optional bool more_results = 3;</code>
20253        */
20254       public boolean hasMoreResults() {
20255         return ((bitField0_ & 0x00000004) == 0x00000004);
20256       }
20257       /**
20258        * <code>optional bool more_results = 3;</code>
20259        */
20260       public boolean getMoreResults() {
20261         return moreResults_;
20262       }
20263       /**
20264        * <code>optional bool more_results = 3;</code>
20265        */
20266       public Builder setMoreResults(boolean value) {
20267         bitField0_ |= 0x00000004;
20268         moreResults_ = value;
20269         onChanged();
20270         return this;
20271       }
20272       /**
20273        * <code>optional bool more_results = 3;</code>
20274        */
20275       public Builder clearMoreResults() {
20276         bitField0_ = (bitField0_ & ~0x00000004);
20277         moreResults_ = false;
20278         onChanged();
20279         return this;
20280       }
20281 
20282       // optional uint32 ttl = 4;
20283       private int ttl_ ;
20284       /**
20285        * <code>optional uint32 ttl = 4;</code>
20286        */
20287       public boolean hasTtl() {
20288         return ((bitField0_ & 0x00000008) == 0x00000008);
20289       }
20290       /**
20291        * <code>optional uint32 ttl = 4;</code>
20292        */
20293       public int getTtl() {
20294         return ttl_;
20295       }
20296       /**
20297        * <code>optional uint32 ttl = 4;</code>
20298        */
20299       public Builder setTtl(int value) {
20300         bitField0_ |= 0x00000008;
20301         ttl_ = value;
20302         onChanged();
20303         return this;
20304       }
20305       /**
20306        * <code>optional uint32 ttl = 4;</code>
20307        */
20308       public Builder clearTtl() {
20309         bitField0_ = (bitField0_ & ~0x00000008);
20310         ttl_ = 0;
20311         onChanged();
20312         return this;
20313       }
20314 
20315       // repeated .Result results = 5;
20316       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_ =
20317         java.util.Collections.emptyList();
20318       private void ensureResultsIsMutable() {
20319         if (!((bitField0_ & 0x00000010) == 0x00000010)) {
20320           results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>(results_);
20321           bitField0_ |= 0x00000010;
20322          }
20323       }
20324 
20325       private com.google.protobuf.RepeatedFieldBuilder<
20326           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultsBuilder_;
20327 
20328       /**
20329        * <code>repeated .Result results = 5;</code>
20330        *
20331        * <pre>
20332        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20333        * This field is mutually exclusive with cells_per_result (since the Cells will
20334        * be inside the pb'd Result)
20335        * </pre>
20336        */
20337       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() {
20338         if (resultsBuilder_ == null) {
20339           return java.util.Collections.unmodifiableList(results_);
20340         } else {
20341           return resultsBuilder_.getMessageList();
20342         }
20343       }
20344       /**
20345        * <code>repeated .Result results = 5;</code>
20346        *
20347        * <pre>
20348        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20349        * This field is mutually exclusive with cells_per_result (since the Cells will
20350        * be inside the pb'd Result)
20351        * </pre>
20352        */
20353       public int getResultsCount() {
20354         if (resultsBuilder_ == null) {
20355           return results_.size();
20356         } else {
20357           return resultsBuilder_.getCount();
20358         }
20359       }
20360       /**
20361        * <code>repeated .Result results = 5;</code>
20362        *
20363        * <pre>
20364        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20365        * This field is mutually exclusive with cells_per_result (since the Cells will
20366        * be inside the pb'd Result)
20367        * </pre>
20368        */
20369       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) {
20370         if (resultsBuilder_ == null) {
20371           return results_.get(index);
20372         } else {
20373           return resultsBuilder_.getMessage(index);
20374         }
20375       }
20376       /**
20377        * <code>repeated .Result results = 5;</code>
20378        *
20379        * <pre>
20380        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20381        * This field is mutually exclusive with cells_per_result (since the Cells will
20382        * be inside the pb'd Result)
20383        * </pre>
20384        */
20385       public Builder setResults(
20386           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
20387         if (resultsBuilder_ == null) {
20388           if (value == null) {
20389             throw new NullPointerException();
20390           }
20391           ensureResultsIsMutable();
20392           results_.set(index, value);
20393           onChanged();
20394         } else {
20395           resultsBuilder_.setMessage(index, value);
20396         }
20397         return this;
20398       }
20399       /**
20400        * <code>repeated .Result results = 5;</code>
20401        *
20402        * <pre>
20403        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20404        * This field is mutually exclusive with cells_per_result (since the Cells will
20405        * be inside the pb'd Result)
20406        * </pre>
20407        */
20408       public Builder setResults(
20409           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
20410         if (resultsBuilder_ == null) {
20411           ensureResultsIsMutable();
20412           results_.set(index, builderForValue.build());
20413           onChanged();
20414         } else {
20415           resultsBuilder_.setMessage(index, builderForValue.build());
20416         }
20417         return this;
20418       }
20419       /**
20420        * <code>repeated .Result results = 5;</code>
20421        *
20422        * <pre>
20423        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20424        * This field is mutually exclusive with cells_per_result (since the Cells will
20425        * be inside the pb'd Result)
20426        * </pre>
20427        */
20428       public Builder addResults(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
20429         if (resultsBuilder_ == null) {
20430           if (value == null) {
20431             throw new NullPointerException();
20432           }
20433           ensureResultsIsMutable();
20434           results_.add(value);
20435           onChanged();
20436         } else {
20437           resultsBuilder_.addMessage(value);
20438         }
20439         return this;
20440       }
20441       /**
20442        * <code>repeated .Result results = 5;</code>
20443        *
20444        * <pre>
20445        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20446        * This field is mutually exclusive with cells_per_result (since the Cells will
20447        * be inside the pb'd Result)
20448        * </pre>
20449        */
20450       public Builder addResults(
20451           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
20452         if (resultsBuilder_ == null) {
20453           if (value == null) {
20454             throw new NullPointerException();
20455           }
20456           ensureResultsIsMutable();
20457           results_.add(index, value);
20458           onChanged();
20459         } else {
20460           resultsBuilder_.addMessage(index, value);
20461         }
20462         return this;
20463       }
20464       /**
20465        * <code>repeated .Result results = 5;</code>
20466        *
20467        * <pre>
20468        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20469        * This field is mutually exclusive with cells_per_result (since the Cells will
20470        * be inside the pb'd Result)
20471        * </pre>
20472        */
20473       public Builder addResults(
20474           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
20475         if (resultsBuilder_ == null) {
20476           ensureResultsIsMutable();
20477           results_.add(builderForValue.build());
20478           onChanged();
20479         } else {
20480           resultsBuilder_.addMessage(builderForValue.build());
20481         }
20482         return this;
20483       }
20484       /**
20485        * <code>repeated .Result results = 5;</code>
20486        *
20487        * <pre>
20488        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20489        * This field is mutually exclusive with cells_per_result (since the Cells will
20490        * be inside the pb'd Result)
20491        * </pre>
20492        */
20493       public Builder addResults(
20494           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
20495         if (resultsBuilder_ == null) {
20496           ensureResultsIsMutable();
20497           results_.add(index, builderForValue.build());
20498           onChanged();
20499         } else {
20500           resultsBuilder_.addMessage(index, builderForValue.build());
20501         }
20502         return this;
20503       }
20504       /**
20505        * <code>repeated .Result results = 5;</code>
20506        *
20507        * <pre>
20508        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20509        * This field is mutually exclusive with cells_per_result (since the Cells will
20510        * be inside the pb'd Result)
20511        * </pre>
20512        */
20513       public Builder addAllResults(
20514           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> values) {
20515         if (resultsBuilder_ == null) {
20516           ensureResultsIsMutable();
20517           super.addAll(values, results_);
20518           onChanged();
20519         } else {
20520           resultsBuilder_.addAllMessages(values);
20521         }
20522         return this;
20523       }
20524       /**
20525        * <code>repeated .Result results = 5;</code>
20526        *
20527        * <pre>
20528        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20529        * This field is mutually exclusive with cells_per_result (since the Cells will
20530        * be inside the pb'd Result)
20531        * </pre>
20532        */
20533       public Builder clearResults() {
20534         if (resultsBuilder_ == null) {
20535           results_ = java.util.Collections.emptyList();
20536           bitField0_ = (bitField0_ & ~0x00000010);
20537           onChanged();
20538         } else {
20539           resultsBuilder_.clear();
20540         }
20541         return this;
20542       }
20543       /**
20544        * <code>repeated .Result results = 5;</code>
20545        *
20546        * <pre>
20547        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20548        * This field is mutually exclusive with cells_per_result (since the Cells will
20549        * be inside the pb'd Result)
20550        * </pre>
20551        */
20552       public Builder removeResults(int index) {
20553         if (resultsBuilder_ == null) {
20554           ensureResultsIsMutable();
20555           results_.remove(index);
20556           onChanged();
20557         } else {
20558           resultsBuilder_.remove(index);
20559         }
20560         return this;
20561       }
20562       /**
20563        * <code>repeated .Result results = 5;</code>
20564        *
20565        * <pre>
20566        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20567        * This field is mutually exclusive with cells_per_result (since the Cells will
20568        * be inside the pb'd Result)
20569        * </pre>
20570        */
20571       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultsBuilder(
20572           int index) {
20573         return getResultsFieldBuilder().getBuilder(index);
20574       }
20575       /**
20576        * <code>repeated .Result results = 5;</code>
20577        *
20578        * <pre>
20579        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20580        * This field is mutually exclusive with cells_per_result (since the Cells will
20581        * be inside the pb'd Result)
20582        * </pre>
20583        */
20584       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
20585           int index) {
20586         if (resultsBuilder_ == null) {
20587           return results_.get(index);  } else {
20588           return resultsBuilder_.getMessageOrBuilder(index);
20589         }
20590       }
20591       /**
20592        * <code>repeated .Result results = 5;</code>
20593        *
20594        * <pre>
20595        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20596        * This field is mutually exclusive with cells_per_result (since the Cells will
20597        * be inside the pb'd Result)
20598        * </pre>
20599        */
20600       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
20601            getResultsOrBuilderList() {
20602         if (resultsBuilder_ != null) {
20603           return resultsBuilder_.getMessageOrBuilderList();
20604         } else {
20605           return java.util.Collections.unmodifiableList(results_);
20606         }
20607       }
20608       /**
20609        * <code>repeated .Result results = 5;</code>
20610        *
20611        * <pre>
20612        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20613        * This field is mutually exclusive with cells_per_result (since the Cells will
20614        * be inside the pb'd Result)
20615        * </pre>
20616        */
20617       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder() {
20618         return getResultsFieldBuilder().addBuilder(
20619             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
20620       }
20621       /**
20622        * <code>repeated .Result results = 5;</code>
20623        *
20624        * <pre>
20625        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20626        * This field is mutually exclusive with cells_per_result (since the Cells will
20627        * be inside the pb'd Result)
20628        * </pre>
20629        */
20630       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder addResultsBuilder(
20631           int index) {
20632         return getResultsFieldBuilder().addBuilder(
20633             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance());
20634       }
20635       /**
20636        * <code>repeated .Result results = 5;</code>
20637        *
20638        * <pre>
20639        * If cells are not carried in an accompanying cellblock, then they are pb'd here.
20640        * This field is mutually exclusive with cells_per_result (since the Cells will
20641        * be inside the pb'd Result)
20642        * </pre>
20643        */
20644       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder>
20645            getResultsBuilderList() {
20646         return getResultsFieldBuilder().getBuilderList();
20647       }
20648       private com.google.protobuf.RepeatedFieldBuilder<
20649           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
20650           getResultsFieldBuilder() {
20651         if (resultsBuilder_ == null) {
20652           resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
20653               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
20654                   results_,
20655                   ((bitField0_ & 0x00000010) == 0x00000010),
20656                   getParentForChildren(),
20657                   isClean());
20658           results_ = null;
20659         }
20660         return resultsBuilder_;
20661       }
20662 
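      // -----------------------------------------------------------------
      // Illustrative sketch, not generated code: results and cells_per_result
      // are mutually exclusive, so the number of Results in a response can be
      // derived from whichever field is populated. The helper name is
      // hypothetical and shown only as an example of reading both fields.
      // -----------------------------------------------------------------
      private static int exampleNumberOfResults(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse response) {
        // Results carried inline as protobuf messages ...
        if (response.getResultsCount() > 0) {
          return response.getResultsCount();
        }
        // ... otherwise one cells_per_result entry per cellblock-backed Result.
        return response.getCellsPerResultCount();
      }
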
20663       // optional bool stale = 6;
20664       private boolean stale_ ;
20665       /**
20666        * <code>optional bool stale = 6;</code>
20667        */
20668       public boolean hasStale() {
20669         return ((bitField0_ & 0x00000020) == 0x00000020);
20670       }
20671       /**
20672        * <code>optional bool stale = 6;</code>
20673        */
20674       public boolean getStale() {
20675         return stale_;
20676       }
20677       /**
20678        * <code>optional bool stale = 6;</code>
20679        */
20680       public Builder setStale(boolean value) {
20681         bitField0_ |= 0x00000020;
20682         stale_ = value;
20683         onChanged();
20684         return this;
20685       }
20686       /**
20687        * <code>optional bool stale = 6;</code>
20688        */
20689       public Builder clearStale() {
20690         bitField0_ = (bitField0_ & ~0x00000020);
20691         stale_ = false;
20692         onChanged();
20693         return this;
20694       }
20695 
20696       // repeated bool partial_flag_per_result = 7;
20697       private java.util.List<java.lang.Boolean> partialFlagPerResult_ = java.util.Collections.emptyList();
20698       private void ensurePartialFlagPerResultIsMutable() {
20699         if (!((bitField0_ & 0x00000040) == 0x00000040)) {
20700           partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(partialFlagPerResult_);
20701           bitField0_ |= 0x00000040;
20702          }
20703       }
20704       /**
20705        * <code>repeated bool partial_flag_per_result = 7;</code>
20706        *
20707        * <pre>
20708        * This field is filled in if we are doing cellblocks. In the event that a row
20709        * could not fit all of its cells into a single RPC chunk, the results will be
20710        * returned as partials, and reconstructed into a complete result on the client
20711        * side. This field is a list of flags indicating whether or not the result
20712        * that the cells belong to is a partial result. For example, if this field
20713        * has false, false, true in it, then we know that on the client side, we need to
20714        * make another RPC request since the last result was only a partial.
20715        * </pre>
20716        */
20717       public java.util.List<java.lang.Boolean>
20718           getPartialFlagPerResultList() {
20719         return java.util.Collections.unmodifiableList(partialFlagPerResult_);
20720       }
20721       /**
20722        * <code>repeated bool partial_flag_per_result = 7;</code>
20723        *
20724        * <pre>
20725        * This field is filled in if we are doing cellblocks. In the event that a row
20726        * could not fit all of its cells into a single RPC chunk, the results will be
20727        * returned as partials, and reconstructed into a complete result on the client
20728        * side. This field is a list of flags indicating whether or not the result
20729        * that the cells belong to is a partial result. For example, if this field
20730        * has false, false, true in it, then we know that on the client side, we need to
20731        * make another RPC request since the last result was only a partial.
20732        * </pre>
20733        */
20734       public int getPartialFlagPerResultCount() {
20735         return partialFlagPerResult_.size();
20736       }
20737       /**
20738        * <code>repeated bool partial_flag_per_result = 7;</code>
20739        *
20740        * <pre>
20741        * This field is filled in if we are doing cellblocks. In the event that a row
20742        * could not fit all of its cells into a single RPC chunk, the results will be
20743        * returned as partials, and reconstructed into a complete result on the client
20744        * side. This field is a list of flags indicating whether or not the result
20745        * that the cells belong to is a partial result. For example, if this field
20746        * has false, false, true in it, then we know that on the client side, we need to
20747        * make another RPC request since the last result was only a partial.
20748        * </pre>
20749        */
20750       public boolean getPartialFlagPerResult(int index) {
20751         return partialFlagPerResult_.get(index);
20752       }
20753       /**
20754        * <code>repeated bool partial_flag_per_result = 7;</code>
20755        *
20756        * <pre>
20757        * This field is filled in if we are doing cellblocks. In the event that a row
20758        * could not fit all of its cells into a single RPC chunk, the results will be
20759        * returned as partials, and reconstructed into a complete result on the client
20760        * side. This field is a list of flags indicating whether or not the result
20761        * that the cells belong to is a partial result. For example, if this field
20762        * has false, false, true in it, then we know that on the client side, we need to
20763        * make another RPC request since the last result was only a partial.
20764        * </pre>
20765        */
20766       public Builder setPartialFlagPerResult(
20767           int index, boolean value) {
20768         ensurePartialFlagPerResultIsMutable();
20769         partialFlagPerResult_.set(index, value);
20770         onChanged();
20771         return this;
20772       }
20773       /**
20774        * <code>repeated bool partial_flag_per_result = 7;</code>
20775        *
20776        * <pre>
20777        * This field is filled in if we are doing cellblocks. In the event that a row
20778        * could not fit all of its cells into a single RPC chunk, the results will be
20779        * returned as partials, and reconstructed into a complete result on the client
20780        * side. This field is a list of flags indicating whether or not the result
20781        * that the cells belong to is a partial result. For example, if this field
20782        * has false, false, true in it, then we know that on the client side, we need to
20783        * make another RPC request since the last result was only a partial.
20784        * </pre>
20785        */
20786       public Builder addPartialFlagPerResult(boolean value) {
20787         ensurePartialFlagPerResultIsMutable();
20788         partialFlagPerResult_.add(value);
20789         onChanged();
20790         return this;
20791       }
20792       /**
20793        * <code>repeated bool partial_flag_per_result = 7;</code>
20794        *
20795        * <pre>
20796        * This field is filled in if we are doing cellblocks. In the event that a row
20797        * could not fit all of its cells into a single RPC chunk, the results will be
20798        * returned as partials, and reconstructed into a complete result on the client
20799        * side. This field is a list of flags indicating whether or not the result
20800        * that the cells belong to is a partial result. For example, if this field
20801        * has false, false, true in it, then we know that on the client side, we need to
20802        * make another RPC request since the last result was only a partial.
20803        * </pre>
20804        */
20805       public Builder addAllPartialFlagPerResult(
20806           java.lang.Iterable<? extends java.lang.Boolean> values) {
20807         ensurePartialFlagPerResultIsMutable();
20808         super.addAll(values, partialFlagPerResult_);
20809         onChanged();
20810         return this;
20811       }
20812       /**
20813        * <code>repeated bool partial_flag_per_result = 7;</code>
20814        *
20815        * <pre>
20816        * This field is filled in if we are doing cellblocks. In the event that a row
20817        * could not fit all of its cells into a single RPC chunk, the results will be
20818        * returned as partials, and reconstructed into a complete result on the client
20819        * side. This field is a list of flags indicating whether or not the result
20820        * that the cells belong to is a partial result. For example, if this field
20821        * has false, false, true in it, then we know that on the client side, we need to
20822        * make another RPC request since the last result was only a partial.
20823        * </pre>
20824        */
20825       public Builder clearPartialFlagPerResult() {
20826         partialFlagPerResult_ = java.util.Collections.emptyList();
20827         bitField0_ = (bitField0_ & ~0x00000040);
20828         onChanged();
20829         return this;
20830       }
20831 
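      // -----------------------------------------------------------------
      // Illustrative sketch, not generated code: per the comment above, a
      // trailing "true" in partial_flag_per_result means the last Result is
      // only a fragment of its row, so the client needs another scan RPC
      // before handing that row to the application. Helper name is
      // hypothetical.
      // -----------------------------------------------------------------
      private static boolean exampleLastResultIsPartial(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse response) {
        int count = response.getPartialFlagPerResultCount();
        return count > 0 && response.getPartialFlagPerResult(count - 1);
      }
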
20832       // optional bool more_results_in_region = 8;
20833       private boolean moreResultsInRegion_ ;
20834       /**
20835        * <code>optional bool more_results_in_region = 8;</code>
20836        *
20837        * <pre>
20838        * A server may choose to limit the number of results returned to the client for
20839        * reasons such as the size in bytes or quantity of results accumulated. This field
20840        * will be true when more results exist in the current region.
20841        * </pre>
20842        */
20843       public boolean hasMoreResultsInRegion() {
20844         return ((bitField0_ & 0x00000080) == 0x00000080);
20845       }
20846       /**
20847        * <code>optional bool more_results_in_region = 8;</code>
20848        *
20849        * <pre>
20850        * A server may choose to limit the number of results returned to the client for
20851        * reasons such as the size in bytes or quantity of results accumulated. This field
20852        * will be true when more results exist in the current region.
20853        * </pre>
20854        */
20855       public boolean getMoreResultsInRegion() {
20856         return moreResultsInRegion_;
20857       }
20858       /**
20859        * <code>optional bool more_results_in_region = 8;</code>
20860        *
20861        * <pre>
20862        * A server may choose to limit the number of results returned to the client for
20863        * reasons such as the size in bytes or quantity of results accumulated. This field
20864        * will be true when more results exist in the current region.
20865        * </pre>
20866        */
20867       public Builder setMoreResultsInRegion(boolean value) {
20868         bitField0_ |= 0x00000080;
20869         moreResultsInRegion_ = value;
20870         onChanged();
20871         return this;
20872       }
20873       /**
20874        * <code>optional bool more_results_in_region = 8;</code>
20875        *
20876        * <pre>
20877        * A server may choose to limit the number of results returned to the client for
20878        * reasons such as the size in bytes or quantity of results accumulated. This field
20879        * will be true when more results exist in the current region.
20880        * </pre>
20881        */
20882       public Builder clearMoreResultsInRegion() {
20883         bitField0_ = (bitField0_ & ~0x00000080);
20884         moreResultsInRegion_ = false;
20885         onChanged();
20886         return this;
20887       }
20888 
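      // -----------------------------------------------------------------
      // Illustrative sketch, not generated code: more_results_in_region is
      // scoped to the current region, while more_results (field 3) covers the
      // scan as a whole. One plausible client policy, assumed here rather than
      // taken from the real HBase client, is to trust the region-scoped flag
      // when present and stay conservative when it is absent.
      // -----------------------------------------------------------------
      private static boolean exampleShouldScanRegionAgain(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse response) {
        if (response.hasMoreResultsInRegion()) {
          return response.getMoreResultsInRegion();
        }
        // Flag not set (e.g. an older server): assume the region may still
        // have rows and let the scan-wide flag be checked elsewhere.
        return true;
      }
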
20889       // optional bool heartbeat_message = 9;
20890       private boolean heartbeatMessage_ ;
20891       /**
20892        * <code>optional bool heartbeat_message = 9;</code>
20893        *
20894        * <pre>
20895        * This field is filled in if the server is sending back a heartbeat message.
20896        * Heartbeat messages are sent back to the client to prevent the scanner from
20897        * timing out. Seeing a heartbeat message communicates to the Client that the
20898        * server would have continued to scan had the time limit not been reached.
20899        * </pre>
20900        */
20901       public boolean hasHeartbeatMessage() {
20902         return ((bitField0_ & 0x00000100) == 0x00000100);
20903       }
20904       /**
20905        * <code>optional bool heartbeat_message = 9;</code>
20906        *
20907        * <pre>
20908        * This field is filled in if the server is sending back a heartbeat message.
20909        * Heartbeat messages are sent back to the client to prevent the scanner from
20910        * timing out. Seeing a heartbeat message communicates to the Client that the
20911        * server would have continued to scan had the time limit not been reached.
20912        * </pre>
20913        */
20914       public boolean getHeartbeatMessage() {
20915         return heartbeatMessage_;
20916       }
20917       /**
20918        * <code>optional bool heartbeat_message = 9;</code>
20919        *
20920        * <pre>
20921        * This field is filled in if the server is sending back a heartbeat message.
20922        * Heartbeat messages are sent back to the client to prevent the scanner from
20923        * timing out. Seeing a heartbeat message communicates to the Client that the
20924        * server would have continued to scan had the time limit not been reached.
20925        * </pre>
20926        */
20927       public Builder setHeartbeatMessage(boolean value) {
20928         bitField0_ |= 0x00000100;
20929         heartbeatMessage_ = value;
20930         onChanged();
20931         return this;
20932       }
20933       /**
20934        * <code>optional bool heartbeat_message = 9;</code>
20935        *
20936        * <pre>
20937        * This field is filled in if the server is sending back a heartbeat message.
20938        * Heartbeat messages are sent back to the client to prevent the scanner from
20939        * timing out. Seeing a heartbeat message communicates to the Client that the
20940        * server would have continued to scan had the time limit not been reached.
20941        * </pre>
20942        */
20943       public Builder clearHeartbeatMessage() {
20944         bitField0_ = (bitField0_ & ~0x00000100);
20945         heartbeatMessage_ = false;
20946         onChanged();
20947         return this;
20948       }
20949 
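      // -----------------------------------------------------------------
      // Illustrative sketch, not generated code: a heartbeat response keeps
      // the scanner from timing out without necessarily carrying data, so a
      // client loop would treat "heartbeat with no Results" as "ask again"
      // rather than "scan finished". The helper name is hypothetical and the
      // real HBase client logic is more involved than this.
      // -----------------------------------------------------------------
      private static boolean exampleIsDataFreeHeartbeat(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse response) {
        return response.hasHeartbeatMessage()
            && response.getHeartbeatMessage()
            && response.getResultsCount() == 0
            && response.getCellsPerResultCount() == 0;
      }
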
20950       // optional .ScanMetrics scan_metrics = 10;
20951       private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
20952       private com.google.protobuf.SingleFieldBuilder<
20953           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder> scanMetricsBuilder_;
20954       /**
20955        * <code>optional .ScanMetrics scan_metrics = 10;</code>
20956        *
20957        * <pre>
20958        * This field is filled in if the client has requested that scan metrics be tracked.
20959        * The metrics tracked here are sent back to the client to be tracked together with
20960        * the existing client side metrics.
20961        * </pre>
20962        */
20963       public boolean hasScanMetrics() {
20964         return ((bitField0_ & 0x00000200) == 0x00000200);
20965       }
20966       /**
20967        * <code>optional .ScanMetrics scan_metrics = 10;</code>
20968        *
20969        * <pre>
20970        * This field is filled in if the client has requested that scan metrics be tracked.
20971        * The metrics tracked here are sent back to the client to be tracked together with
20972        * the existing client side metrics.
20973        * </pre>
20974        */
20975       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() {
20976         if (scanMetricsBuilder_ == null) {
20977           return scanMetrics_;
20978         } else {
20979           return scanMetricsBuilder_.getMessage();
20980         }
20981       }
20982       /**
20983        * <code>optional .ScanMetrics scan_metrics = 10;</code>
20984        *
20985        * <pre>
20986        * This field is filled in if the client has requested that scan metrics be tracked.
20987        * The metrics tracked here are sent back to the client to be tracked together with
20988        * the existing client side metrics.
20989        * </pre>
20990        */
20991       public Builder setScanMetrics(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics value) {
20992         if (scanMetricsBuilder_ == null) {
20993           if (value == null) {
20994             throw new NullPointerException();
20995           }
20996           scanMetrics_ = value;
20997           onChanged();
20998         } else {
20999           scanMetricsBuilder_.setMessage(value);
21000         }
21001         bitField0_ |= 0x00000200;
21002         return this;
21003       }
21004       /**
21005        * <code>optional .ScanMetrics scan_metrics = 10;</code>
21006        *
21007        * <pre>
21008        * This field is filled in if the client has requested that scan metrics be tracked.
21009        * The metrics tracked here are sent back to the client to be tracked together with
21010        * the existing client side metrics.
21011        * </pre>
21012        */
21013       public Builder setScanMetrics(
21014           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder builderForValue) {
21015         if (scanMetricsBuilder_ == null) {
21016           scanMetrics_ = builderForValue.build();
21017           onChanged();
21018         } else {
21019           scanMetricsBuilder_.setMessage(builderForValue.build());
21020         }
21021         bitField0_ |= 0x00000200;
21022         return this;
21023       }
21024       /**
21025        * <code>optional .ScanMetrics scan_metrics = 10;</code>
21026        *
21027        * <pre>
21028        * This field is filled in if the client has requested that scan metrics be tracked.
21029        * The metrics tracked here are sent back to the client to be tracked together with
21030        * the existing client side metrics.
21031        * </pre>
21032        */
21033       public Builder mergeScanMetrics(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics value) {
21034         if (scanMetricsBuilder_ == null) {
21035           if (((bitField0_ & 0x00000200) == 0x00000200) &&
21036               scanMetrics_ != org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) {
21037             scanMetrics_ =
21038               org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder(scanMetrics_).mergeFrom(value).buildPartial();
21039           } else {
21040             scanMetrics_ = value;
21041           }
21042           onChanged();
21043         } else {
21044           scanMetricsBuilder_.mergeFrom(value);
21045         }
21046         bitField0_ |= 0x00000200;
21047         return this;
21048       }
21049       /**
21050        * <code>optional .ScanMetrics scan_metrics = 10;</code>
21051        *
21052        * <pre>
21053        * This field is filled in if the client has requested that scan metrics be tracked.
21054        * The metrics tracked here are sent back to the client to be tracked together with
21055        * the existing client side metrics.
21056        * </pre>
21057        */
21058       public Builder clearScanMetrics() {
21059         if (scanMetricsBuilder_ == null) {
21060           scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
21061           onChanged();
21062         } else {
21063           scanMetricsBuilder_.clear();
21064         }
21065         bitField0_ = (bitField0_ & ~0x00000200);
21066         return this;
21067       }
21068       /**
21069        * <code>optional .ScanMetrics scan_metrics = 10;</code>
21070        *
21071        * <pre>
21072        * This field is filled in if the client has requested that scan metrics be tracked.
21073        * The metrics tracked here are sent back to the client to be tracked together with
21074        * the existing client side metrics.
21075        * </pre>
21076        */
21077       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder getScanMetricsBuilder() {
21078         bitField0_ |= 0x00000200;
21079         onChanged();
21080         return getScanMetricsFieldBuilder().getBuilder();
21081       }
21082       /**
21083        * <code>optional .ScanMetrics scan_metrics = 10;</code>
21084        *
21085        * <pre>
21086        * This field is filled in if the client has requested that scan metrics be tracked.
21087        * The metrics tracked here are sent back to the client to be tracked together with
21088        * the existing client side metrics.
21089        * </pre>
21090        */
21091       public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() {
21092         if (scanMetricsBuilder_ != null) {
21093           return scanMetricsBuilder_.getMessageOrBuilder();
21094         } else {
21095           return scanMetrics_;
21096         }
21097       }
21098       /**
21099        * <code>optional .ScanMetrics scan_metrics = 10;</code>
21100        *
21101        * <pre>
21102        * This field is filled in if the client has requested that scan metrics be tracked.
21103        * The metrics tracked here are sent back to the client to be tracked together with
21104        * the existing client side metrics.
21105        * </pre>
21106        */
21107       private com.google.protobuf.SingleFieldBuilder<
21108           org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>
21109           getScanMetricsFieldBuilder() {
21110         if (scanMetricsBuilder_ == null) {
21111           scanMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
21112               org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder>(
21113                   scanMetrics_,
21114                   getParentForChildren(),
21115                   isClean());
21116           scanMetrics_ = null;
21117         }
21118         return scanMetricsBuilder_;
21119       }
21120 
21121       // @@protoc_insertion_point(builder_scope:ScanResponse)
21122     }
21123 
21124     static {
21125       defaultInstance = new ScanResponse(true);
21126       defaultInstance.initFields();
21127     }
21128 
21129     // @@protoc_insertion_point(class_scope:ScanResponse)
21130   }
21131 
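  // -------------------------------------------------------------------------
  // Illustrative sketch, not generated code: putting the ScanResponse builder
  // shown above to use. A server-side caller might assemble a heartbeat-style
  // response along these lines; the values and the method itself are made up
  // solely to show the builder call pattern.
  // -------------------------------------------------------------------------
  private static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse
      exampleHeartbeatResponse(long scannerId) {
    return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.newBuilder()
        .setScannerId(scannerId)     // identifies the open scanner to the client
        .setHeartbeatMessage(true)   // no data yet, just keeping the scanner alive
        .setMoreResults(true)        // the scan as a whole is not finished
        .build();
  }
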
21132   public interface BulkLoadHFileRequestOrBuilder
21133       extends com.google.protobuf.MessageOrBuilder {
21134 
21135     // required .RegionSpecifier region = 1;
21136     /**
21137      * <code>required .RegionSpecifier region = 1;</code>
21138      */
21139     boolean hasRegion();
21140     /**
21141      * <code>required .RegionSpecifier region = 1;</code>
21142      */
21143     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
21144     /**
21145      * <code>required .RegionSpecifier region = 1;</code>
21146      */
21147     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
21148 
21149     // repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;
21150     /**
21151      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21152      */
21153     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>
21154         getFamilyPathList();
21155     /**
21156      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21157      */
21158     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
21159     /**
21160      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21161      */
21162     int getFamilyPathCount();
21163     /**
21164      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21165      */
21166     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
21167         getFamilyPathOrBuilderList();
21168     /**
21169      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21170      */
21171     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
21172         int index);
21173 
21174     // optional bool assign_seq_num = 3;
21175     /**
21176      * <code>optional bool assign_seq_num = 3;</code>
21177      */
21178     boolean hasAssignSeqNum();
21179     /**
21180      * <code>optional bool assign_seq_num = 3;</code>
21181      */
21182     boolean getAssignSeqNum();
21183   }
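
  /*
   * Illustrative only (not generated by protoc): a minimal sketch of how a caller
   * might assemble the BulkLoadHFileRequest defined below, assuming the standard
   * protobuf 2.x builder API. The encoded region name, column family, and HFile
   * path are hypothetical placeholders, and "HBaseProtos" abbreviates
   * org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.
   *
   *   HBaseProtos.RegionSpecifier region = HBaseProtos.RegionSpecifier.newBuilder()
   *       .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
   *       .setValue(com.google.protobuf.ByteString.copyFromUtf8("example-encoded-region"))
   *       .build();
   *   BulkLoadHFileRequest request = BulkLoadHFileRequest.newBuilder()
   *       .setRegion(region)
   *       .addFamilyPath(BulkLoadHFileRequest.FamilyPath.newBuilder()
   *           .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
   *           .setPath("/staging/cf/example.hfile")
   *           .build())
   *       .setAssignSeqNum(true)
   *       .build();
   */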
  /**
   * Protobuf type {@code BulkLoadHFileRequest}
   *
   * <pre>
   **
   * Atomically bulk load multiple HFiles (say from different column families)
   * into an open region.
   * </pre>
   */
  public static final class BulkLoadHFileRequest extends
      com.google.protobuf.GeneratedMessage
      implements BulkLoadHFileRequestOrBuilder {
    // Use BulkLoadHFileRequest.newBuilder() to construct.
    private BulkLoadHFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private BulkLoadHFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final BulkLoadHFileRequest defaultInstance;
    public static BulkLoadHFileRequest getDefaultInstance() {
      return defaultInstance;
    }

    public BulkLoadHFileRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
BulkLoadHFileRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)21218     private BulkLoadHFileRequest(
21219         com.google.protobuf.CodedInputStream input,
21220         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21221         throws com.google.protobuf.InvalidProtocolBufferException {
21222       initFields();
21223       int mutable_bitField0_ = 0;
21224       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
21225           com.google.protobuf.UnknownFieldSet.newBuilder();
21226       try {
21227         boolean done = false;
21228         while (!done) {
21229           int tag = input.readTag();
21230           switch (tag) {
21231             case 0:
21232               done = true;
21233               break;
21234             default: {
21235               if (!parseUnknownField(input, unknownFields,
21236                                      extensionRegistry, tag)) {
21237                 done = true;
21238               }
21239               break;
21240             }
21241             case 10: {
21242               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
21243               if (((bitField0_ & 0x00000001) == 0x00000001)) {
21244                 subBuilder = region_.toBuilder();
21245               }
21246               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
21247               if (subBuilder != null) {
21248                 subBuilder.mergeFrom(region_);
21249                 region_ = subBuilder.buildPartial();
21250               }
21251               bitField0_ |= 0x00000001;
21252               break;
21253             }
21254             case 18: {
21255               if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
21256                 familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
21257                 mutable_bitField0_ |= 0x00000002;
21258               }
21259               familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
21260               break;
21261             }
21262             case 24: {
21263               bitField0_ |= 0x00000002;
21264               assignSeqNum_ = input.readBool();
21265               break;
21266             }
21267           }
21268         }
21269       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21270         throw e.setUnfinishedMessage(this);
21271       } catch (java.io.IOException e) {
21272         throw new com.google.protobuf.InvalidProtocolBufferException(
21273             e.getMessage()).setUnfinishedMessage(this);
21274       } finally {
21275         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
21276           familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
21277         }
21278         this.unknownFields = unknownFields.build();
21279         makeExtensionsImmutable();
21280       }
21281     }
21282     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()21283         getDescriptor() {
21284       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
21285     }
21286 
21287     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()21288         internalGetFieldAccessorTable() {
21289       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable
21290           .ensureFieldAccessorsInitialized(
21291               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
21292     }
21293 
21294     public static com.google.protobuf.Parser<BulkLoadHFileRequest> PARSER =
21295         new com.google.protobuf.AbstractParser<BulkLoadHFileRequest>() {
21296       public BulkLoadHFileRequest parsePartialFrom(
21297           com.google.protobuf.CodedInputStream input,
21298           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21299           throws com.google.protobuf.InvalidProtocolBufferException {
21300         return new BulkLoadHFileRequest(input, extensionRegistry);
21301       }
21302     };
21303 
21304     @java.lang.Override
getParserForType()21305     public com.google.protobuf.Parser<BulkLoadHFileRequest> getParserForType() {
21306       return PARSER;
21307     }
21308 
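    /*
     * Illustrative only (not generated by protoc): PARSER above backs the static
     * parseFrom overloads declared further down. For example, given a byte[]
     * previously produced by toByteArray() (here called "serializedBytes", a
     * hypothetical variable):
     *
     *   BulkLoadHFileRequest req = BulkLoadHFileRequest.parseFrom(serializedBytes);
     *   int familyCount = req.getFamilyPathCount();
     *
     * parseFrom throws InvalidProtocolBufferException if the bytes are malformed
     * or a required field (region, family, path) is missing.
     */
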
    public interface FamilyPathOrBuilder
        extends com.google.protobuf.MessageOrBuilder {

      // required bytes family = 1;
      /**
       * <code>required bytes family = 1;</code>
       */
      boolean hasFamily();
      /**
       * <code>required bytes family = 1;</code>
       */
      com.google.protobuf.ByteString getFamily();

      // required string path = 2;
      /**
       * <code>required string path = 2;</code>
       */
      boolean hasPath();
      /**
       * <code>required string path = 2;</code>
       */
      java.lang.String getPath();
      /**
       * <code>required string path = 2;</code>
       */
      com.google.protobuf.ByteString
          getPathBytes();
    }
    /**
     * Protobuf type {@code BulkLoadHFileRequest.FamilyPath}
     */
    public static final class FamilyPath extends
        com.google.protobuf.GeneratedMessage
        implements FamilyPathOrBuilder {
      // Use FamilyPath.newBuilder() to construct.
      private FamilyPath(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
        super(builder);
        this.unknownFields = builder.getUnknownFields();
      }
      private FamilyPath(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

      private static final FamilyPath defaultInstance;
      public static FamilyPath getDefaultInstance() {
        return defaultInstance;
      }

      public FamilyPath getDefaultInstanceForType() {
        return defaultInstance;
      }

      private final com.google.protobuf.UnknownFieldSet unknownFields;
      @java.lang.Override
      public final com.google.protobuf.UnknownFieldSet
          getUnknownFields() {
        return this.unknownFields;
      }
FamilyPath( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)21365       private FamilyPath(
21366           com.google.protobuf.CodedInputStream input,
21367           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21368           throws com.google.protobuf.InvalidProtocolBufferException {
21369         initFields();
21370         int mutable_bitField0_ = 0;
21371         com.google.protobuf.UnknownFieldSet.Builder unknownFields =
21372             com.google.protobuf.UnknownFieldSet.newBuilder();
21373         try {
21374           boolean done = false;
21375           while (!done) {
21376             int tag = input.readTag();
21377             switch (tag) {
21378               case 0:
21379                 done = true;
21380                 break;
21381               default: {
21382                 if (!parseUnknownField(input, unknownFields,
21383                                        extensionRegistry, tag)) {
21384                   done = true;
21385                 }
21386                 break;
21387               }
21388               case 10: {
21389                 bitField0_ |= 0x00000001;
21390                 family_ = input.readBytes();
21391                 break;
21392               }
21393               case 18: {
21394                 bitField0_ |= 0x00000002;
21395                 path_ = input.readBytes();
21396                 break;
21397               }
21398             }
21399           }
21400         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21401           throw e.setUnfinishedMessage(this);
21402         } catch (java.io.IOException e) {
21403           throw new com.google.protobuf.InvalidProtocolBufferException(
21404               e.getMessage()).setUnfinishedMessage(this);
21405         } finally {
21406           this.unknownFields = unknownFields.build();
21407           makeExtensionsImmutable();
21408         }
21409       }
21410       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()21411           getDescriptor() {
21412         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
21413       }
21414 
21415       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()21416           internalGetFieldAccessorTable() {
21417         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
21418             .ensureFieldAccessorsInitialized(
21419                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
21420       }
21421 
21422       public static com.google.protobuf.Parser<FamilyPath> PARSER =
21423           new com.google.protobuf.AbstractParser<FamilyPath>() {
21424         public FamilyPath parsePartialFrom(
21425             com.google.protobuf.CodedInputStream input,
21426             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21427             throws com.google.protobuf.InvalidProtocolBufferException {
21428           return new FamilyPath(input, extensionRegistry);
21429         }
21430       };
21431 
21432       @java.lang.Override
getParserForType()21433       public com.google.protobuf.Parser<FamilyPath> getParserForType() {
21434         return PARSER;
21435       }
21436 
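      // bitField0_ is the generated presence bitmap for this message: bit 0x1
      // tracks "family" and bit 0x2 tracks "path", which is what hasFamily()
      // and hasPath() below test against.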
21437       private int bitField0_;
21438       // required bytes family = 1;
21439       public static final int FAMILY_FIELD_NUMBER = 1;
21440       private com.google.protobuf.ByteString family_;
21441       /**
21442        * <code>required bytes family = 1;</code>
21443        */
hasFamily()21444       public boolean hasFamily() {
21445         return ((bitField0_ & 0x00000001) == 0x00000001);
21446       }
21447       /**
21448        * <code>required bytes family = 1;</code>
21449        */
getFamily()21450       public com.google.protobuf.ByteString getFamily() {
21451         return family_;
21452       }
21453 
21454       // required string path = 2;
21455       public static final int PATH_FIELD_NUMBER = 2;
21456       private java.lang.Object path_;
21457       /**
21458        * <code>required string path = 2;</code>
21459        */
hasPath()21460       public boolean hasPath() {
21461         return ((bitField0_ & 0x00000002) == 0x00000002);
21462       }
21463       /**
21464        * <code>required string path = 2;</code>
21465        */
getPath()21466       public java.lang.String getPath() {
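        // path_ is kept as a ByteString until first read; it is decoded to a
        // String lazily and the String is cached only if the bytes are valid UTF-8.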
21467         java.lang.Object ref = path_;
21468         if (ref instanceof java.lang.String) {
21469           return (java.lang.String) ref;
21470         } else {
21471           com.google.protobuf.ByteString bs =
21472               (com.google.protobuf.ByteString) ref;
21473           java.lang.String s = bs.toStringUtf8();
21474           if (bs.isValidUtf8()) {
21475             path_ = s;
21476           }
21477           return s;
21478         }
21479       }
21480       /**
21481        * <code>required string path = 2;</code>
21482        */
21483       public com.google.protobuf.ByteString
getPathBytes()21484           getPathBytes() {
21485         java.lang.Object ref = path_;
21486         if (ref instanceof java.lang.String) {
21487           com.google.protobuf.ByteString b =
21488               com.google.protobuf.ByteString.copyFromUtf8(
21489                   (java.lang.String) ref);
21490           path_ = b;
21491           return b;
21492         } else {
21493           return (com.google.protobuf.ByteString) ref;
21494         }
21495       }
21496 
initFields()21497       private void initFields() {
21498         family_ = com.google.protobuf.ByteString.EMPTY;
21499         path_ = "";
21500       }
21501       private byte memoizedIsInitialized = -1;
isInitialized()21502       public final boolean isInitialized() {
21503         byte isInitialized = memoizedIsInitialized;
21504         if (isInitialized != -1) return isInitialized == 1;
21505 
21506         if (!hasFamily()) {
21507           memoizedIsInitialized = 0;
21508           return false;
21509         }
21510         if (!hasPath()) {
21511           memoizedIsInitialized = 0;
21512           return false;
21513         }
21514         memoizedIsInitialized = 1;
21515         return true;
21516       }
21517 
writeTo(com.google.protobuf.CodedOutputStream output)21518       public void writeTo(com.google.protobuf.CodedOutputStream output)
21519                           throws java.io.IOException {
21520         getSerializedSize();
21521         if (((bitField0_ & 0x00000001) == 0x00000001)) {
21522           output.writeBytes(1, family_);
21523         }
21524         if (((bitField0_ & 0x00000002) == 0x00000002)) {
21525           output.writeBytes(2, getPathBytes());
21526         }
21527         getUnknownFields().writeTo(output);
21528       }
21529 
21530       private int memoizedSerializedSize = -1;
getSerializedSize()21531       public int getSerializedSize() {
21532         int size = memoizedSerializedSize;
21533         if (size != -1) return size;
21534 
21535         size = 0;
21536         if (((bitField0_ & 0x00000001) == 0x00000001)) {
21537           size += com.google.protobuf.CodedOutputStream
21538             .computeBytesSize(1, family_);
21539         }
21540         if (((bitField0_ & 0x00000002) == 0x00000002)) {
21541           size += com.google.protobuf.CodedOutputStream
21542             .computeBytesSize(2, getPathBytes());
21543         }
21544         size += getUnknownFields().getSerializedSize();
21545         memoizedSerializedSize = size;
21546         return size;
21547       }
21548 
21549       private static final long serialVersionUID = 0L;
21550       @java.lang.Override
writeReplace()21551       protected java.lang.Object writeReplace()
21552           throws java.io.ObjectStreamException {
21553         return super.writeReplace();
21554       }
21555 
21556       @java.lang.Override
equals(final java.lang.Object obj)21557       public boolean equals(final java.lang.Object obj) {
21558         if (obj == this) {
21559          return true;
21560         }
21561         if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)) {
21562           return super.equals(obj);
21563         }
21564         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) obj;
21565 
21566         boolean result = true;
21567         result = result && (hasFamily() == other.hasFamily());
21568         if (hasFamily()) {
21569           result = result && getFamily()
21570               .equals(other.getFamily());
21571         }
21572         result = result && (hasPath() == other.hasPath());
21573         if (hasPath()) {
21574           result = result && getPath()
21575               .equals(other.getPath());
21576         }
21577         result = result &&
21578             getUnknownFields().equals(other.getUnknownFields());
21579         return result;
21580       }
21581 
21582       private int memoizedHashCode = 0;
21583       @java.lang.Override
hashCode()21584       public int hashCode() {
21585         if (memoizedHashCode != 0) {
21586           return memoizedHashCode;
21587         }
21588         int hash = 41;
21589         hash = (19 * hash) + getDescriptorForType().hashCode();
21590         if (hasFamily()) {
21591           hash = (37 * hash) + FAMILY_FIELD_NUMBER;
21592           hash = (53 * hash) + getFamily().hashCode();
21593         }
21594         if (hasPath()) {
21595           hash = (37 * hash) + PATH_FIELD_NUMBER;
21596           hash = (53 * hash) + getPath().hashCode();
21597         }
21598         hash = (29 * hash) + getUnknownFields().hashCode();
21599         memoizedHashCode = hash;
21600         return hash;
21601       }
21602 
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          byte[] data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return PARSER.parseFrom(data, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(
          java.io.InputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseDelimitedFrom(input, extensionRegistry);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.CodedInputStream input)
          throws java.io.IOException {
        return PARSER.parseFrom(input);
      }
      public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        return PARSER.parseFrom(input, extensionRegistry);
      }

      public static Builder newBuilder() { return Builder.create(); }
      public Builder newBuilderForType() { return newBuilder(); }
      public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath prototype) {
        return newBuilder().mergeFrom(prototype);
      }
      public Builder toBuilder() { return newBuilder(this); }

      @java.lang.Override
      protected Builder newBuilderForType(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        Builder builder = new Builder(parent);
        return builder;
      }
21669       /**
21670        * Protobuf type {@code BulkLoadHFileRequest.FamilyPath}
21671        */
21672       public static final class Builder extends
21673           com.google.protobuf.GeneratedMessage.Builder<Builder>
21674          implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder {
21675         public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()21676             getDescriptor() {
21677           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
21678         }
21679 
21680         protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()21681             internalGetFieldAccessorTable() {
21682           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable
21683               .ensureFieldAccessorsInitialized(
21684                   org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class);
21685         }
21686 
21687         // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder()
Builder()21688         private Builder() {
21689           maybeForceBuilderInitialization();
21690         }
21691 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)21692         private Builder(
21693             com.google.protobuf.GeneratedMessage.BuilderParent parent) {
21694           super(parent);
21695           maybeForceBuilderInitialization();
21696         }
maybeForceBuilderInitialization()21697         private void maybeForceBuilderInitialization() {
21698           if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
21699           }
21700         }
create()21701         private static Builder create() {
21702           return new Builder();
21703         }
21704 
clear()21705         public Builder clear() {
21706           super.clear();
21707           family_ = com.google.protobuf.ByteString.EMPTY;
21708           bitField0_ = (bitField0_ & ~0x00000001);
21709           path_ = "";
21710           bitField0_ = (bitField0_ & ~0x00000002);
21711           return this;
21712         }
21713 
clone()21714         public Builder clone() {
21715           return create().mergeFrom(buildPartial());
21716         }
21717 
21718         public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()21719             getDescriptorForType() {
21720           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
21721         }
21722 
getDefaultInstanceForType()21723         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() {
21724           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance();
21725         }
21726 
build()21727         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath build() {
21728           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial();
21729           if (!result.isInitialized()) {
21730             throw newUninitializedMessageException(result);
21731           }
21732           return result;
21733         }
21734 
buildPartial()21735         public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() {
21736           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath(this);
21737           int from_bitField0_ = bitField0_;
21738           int to_bitField0_ = 0;
21739           if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
21740             to_bitField0_ |= 0x00000001;
21741           }
21742           result.family_ = family_;
21743           if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
21744             to_bitField0_ |= 0x00000002;
21745           }
21746           result.path_ = path_;
21747           result.bitField0_ = to_bitField0_;
21748           onBuilt();
21749           return result;
21750         }
21751 
mergeFrom(com.google.protobuf.Message other)21752         public Builder mergeFrom(com.google.protobuf.Message other) {
21753           if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) {
21754             return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath)other);
21755           } else {
21756             super.mergeFrom(other);
21757             return this;
21758           }
21759         }
21760 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other)21761         public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath other) {
21762           if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this;
21763           if (other.hasFamily()) {
21764             setFamily(other.getFamily());
21765           }
21766           if (other.hasPath()) {
21767             bitField0_ |= 0x00000002;
21768             path_ = other.path_;
21769             onChanged();
21770           }
21771           this.mergeUnknownFields(other.getUnknownFields());
21772           return this;
21773         }
21774 
isInitialized()21775         public final boolean isInitialized() {
21776           if (!hasFamily()) {
21777 
21778             return false;
21779           }
21780           if (!hasPath()) {
21781 
21782             return false;
21783           }
21784           return true;
21785         }
21786 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)21787         public Builder mergeFrom(
21788             com.google.protobuf.CodedInputStream input,
21789             com.google.protobuf.ExtensionRegistryLite extensionRegistry)
21790             throws java.io.IOException {
21791           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath parsedMessage = null;
21792           try {
21793             parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
21794           } catch (com.google.protobuf.InvalidProtocolBufferException e) {
21795             parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath) e.getUnfinishedMessage();
21796             throw e;
21797           } finally {
21798             if (parsedMessage != null) {
21799               mergeFrom(parsedMessage);
21800             }
21801           }
21802           return this;
21803         }
21804         private int bitField0_;
21805 
21806         // required bytes family = 1;
21807         private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
21808         /**
21809          * <code>required bytes family = 1;</code>
21810          */
hasFamily()21811         public boolean hasFamily() {
21812           return ((bitField0_ & 0x00000001) == 0x00000001);
21813         }
21814         /**
21815          * <code>required bytes family = 1;</code>
21816          */
getFamily()21817         public com.google.protobuf.ByteString getFamily() {
21818           return family_;
21819         }
21820         /**
21821          * <code>required bytes family = 1;</code>
21822          */
        public Builder setFamily(com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000001;
          family_ = value;
          onChanged();
          return this;
        }
21832         /**
21833          * <code>required bytes family = 1;</code>
21834          */
clearFamily()21835         public Builder clearFamily() {
21836           bitField0_ = (bitField0_ & ~0x00000001);
21837           family_ = getDefaultInstance().getFamily();
21838           onChanged();
21839           return this;
21840         }
21841 
21842         // required string path = 2;
21843         private java.lang.Object path_ = "";
21844         /**
21845          * <code>required string path = 2;</code>
21846          */
hasPath()21847         public boolean hasPath() {
21848           return ((bitField0_ & 0x00000002) == 0x00000002);
21849         }
21850         /**
21851          * <code>required string path = 2;</code>
21852          */
getPath()21853         public java.lang.String getPath() {
21854           java.lang.Object ref = path_;
21855           if (!(ref instanceof java.lang.String)) {
21856             java.lang.String s = ((com.google.protobuf.ByteString) ref)
21857                 .toStringUtf8();
21858             path_ = s;
21859             return s;
21860           } else {
21861             return (java.lang.String) ref;
21862           }
21863         }
21864         /**
21865          * <code>required string path = 2;</code>
21866          */
21867         public com.google.protobuf.ByteString
getPathBytes()21868             getPathBytes() {
21869           java.lang.Object ref = path_;
21870           if (ref instanceof String) {
21871             com.google.protobuf.ByteString b =
21872                 com.google.protobuf.ByteString.copyFromUtf8(
21873                     (java.lang.String) ref);
21874             path_ = b;
21875             return b;
21876           } else {
21877             return (com.google.protobuf.ByteString) ref;
21878           }
21879         }
21880         /**
21881          * <code>required string path = 2;</code>
21882          */
        public Builder setPath(
            java.lang.String value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
          path_ = value;
          onChanged();
          return this;
        }
        /**
         * <code>required string path = 2;</code>
         */
        public Builder clearPath() {
          bitField0_ = (bitField0_ & ~0x00000002);
          path_ = getDefaultInstance().getPath();
          onChanged();
          return this;
        }
        /**
         * <code>required string path = 2;</code>
         */
        public Builder setPathBytes(
            com.google.protobuf.ByteString value) {
          if (value == null) {
            throw new NullPointerException();
          }
          bitField0_ |= 0x00000002;
          path_ = value;
          onChanged();
          return this;
        }
21915 
        // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath)
      }

      static {
        defaultInstance = new FamilyPath(true);
        defaultInstance.initFields();
      }

      // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath)
    }
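
    /*
     * Illustrative only (not generated by protoc): FamilyPath is the per-family
     * entry of this request; a minimal builder/parser round trip might look like
     * the following (the column family and HFile path are hypothetical):
     *
     *   BulkLoadHFileRequest.FamilyPath fp = BulkLoadHFileRequest.FamilyPath.newBuilder()
     *       .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
     *       .setPath("/staging/cf/example.hfile")
     *       .build();
     *   BulkLoadHFileRequest.FamilyPath copy =
     *       BulkLoadHFileRequest.FamilyPath.parseFrom(fp.toByteString());
     *
     * Both "family" and "path" are required, so build() throws if either is unset.
     */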
21926 
21927     private int bitField0_;
21928     // required .RegionSpecifier region = 1;
21929     public static final int REGION_FIELD_NUMBER = 1;
21930     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
21931     /**
21932      * <code>required .RegionSpecifier region = 1;</code>
21933      */
hasRegion()21934     public boolean hasRegion() {
21935       return ((bitField0_ & 0x00000001) == 0x00000001);
21936     }
21937     /**
21938      * <code>required .RegionSpecifier region = 1;</code>
21939      */
getRegion()21940     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
21941       return region_;
21942     }
21943     /**
21944      * <code>required .RegionSpecifier region = 1;</code>
21945      */
getRegionOrBuilder()21946     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
21947       return region_;
21948     }
21949 
21950     // repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;
21951     public static final int FAMILY_PATH_FIELD_NUMBER = 2;
21952     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
21953     /**
21954      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21955      */
getFamilyPathList()21956     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
21957       return familyPath_;
21958     }
21959     /**
21960      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21961      */
21962     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
getFamilyPathOrBuilderList()21963         getFamilyPathOrBuilderList() {
21964       return familyPath_;
21965     }
21966     /**
21967      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21968      */
getFamilyPathCount()21969     public int getFamilyPathCount() {
21970       return familyPath_.size();
21971     }
21972     /**
21973      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21974      */
getFamilyPath(int index)21975     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
21976       return familyPath_.get(index);
21977     }
21978     /**
21979      * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
21980      */
getFamilyPathOrBuilder( int index)21981     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
21982         int index) {
21983       return familyPath_.get(index);
21984     }
21985 
21986     // optional bool assign_seq_num = 3;
21987     public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 3;
21988     private boolean assignSeqNum_;
21989     /**
21990      * <code>optional bool assign_seq_num = 3;</code>
21991      */
hasAssignSeqNum()21992     public boolean hasAssignSeqNum() {
21993       return ((bitField0_ & 0x00000002) == 0x00000002);
21994     }
21995     /**
21996      * <code>optional bool assign_seq_num = 3;</code>
21997      */
getAssignSeqNum()21998     public boolean getAssignSeqNum() {
21999       return assignSeqNum_;
22000     }
22001 
initFields()22002     private void initFields() {
22003       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
22004       familyPath_ = java.util.Collections.emptyList();
22005       assignSeqNum_ = false;
22006     }
22007     private byte memoizedIsInitialized = -1;
isInitialized()22008     public final boolean isInitialized() {
22009       byte isInitialized = memoizedIsInitialized;
22010       if (isInitialized != -1) return isInitialized == 1;
22011 
22012       if (!hasRegion()) {
22013         memoizedIsInitialized = 0;
22014         return false;
22015       }
22016       if (!getRegion().isInitialized()) {
22017         memoizedIsInitialized = 0;
22018         return false;
22019       }
22020       for (int i = 0; i < getFamilyPathCount(); i++) {
22021         if (!getFamilyPath(i).isInitialized()) {
22022           memoizedIsInitialized = 0;
22023           return false;
22024         }
22025       }
22026       memoizedIsInitialized = 1;
22027       return true;
22028     }
22029 
writeTo(com.google.protobuf.CodedOutputStream output)22030     public void writeTo(com.google.protobuf.CodedOutputStream output)
22031                         throws java.io.IOException {
22032       getSerializedSize();
22033       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22034         output.writeMessage(1, region_);
22035       }
22036       for (int i = 0; i < familyPath_.size(); i++) {
22037         output.writeMessage(2, familyPath_.get(i));
22038       }
22039       if (((bitField0_ & 0x00000002) == 0x00000002)) {
22040         output.writeBool(3, assignSeqNum_);
22041       }
22042       getUnknownFields().writeTo(output);
22043     }
22044 
22045     private int memoizedSerializedSize = -1;
getSerializedSize()22046     public int getSerializedSize() {
22047       int size = memoizedSerializedSize;
22048       if (size != -1) return size;
22049 
22050       size = 0;
22051       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22052         size += com.google.protobuf.CodedOutputStream
22053           .computeMessageSize(1, region_);
22054       }
22055       for (int i = 0; i < familyPath_.size(); i++) {
22056         size += com.google.protobuf.CodedOutputStream
22057           .computeMessageSize(2, familyPath_.get(i));
22058       }
22059       if (((bitField0_ & 0x00000002) == 0x00000002)) {
22060         size += com.google.protobuf.CodedOutputStream
22061           .computeBoolSize(3, assignSeqNum_);
22062       }
22063       size += getUnknownFields().getSerializedSize();
22064       memoizedSerializedSize = size;
22065       return size;
22066     }
22067 
22068     private static final long serialVersionUID = 0L;
22069     @java.lang.Override
writeReplace()22070     protected java.lang.Object writeReplace()
22071         throws java.io.ObjectStreamException {
22072       return super.writeReplace();
22073     }
22074 
22075     @java.lang.Override
equals(final java.lang.Object obj)22076     public boolean equals(final java.lang.Object obj) {
22077       if (obj == this) {
22078        return true;
22079       }
22080       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)) {
22081         return super.equals(obj);
22082       }
22083       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) obj;
22084 
22085       boolean result = true;
22086       result = result && (hasRegion() == other.hasRegion());
22087       if (hasRegion()) {
22088         result = result && getRegion()
22089             .equals(other.getRegion());
22090       }
22091       result = result && getFamilyPathList()
22092           .equals(other.getFamilyPathList());
22093       result = result && (hasAssignSeqNum() == other.hasAssignSeqNum());
22094       if (hasAssignSeqNum()) {
22095         result = result && (getAssignSeqNum()
22096             == other.getAssignSeqNum());
22097       }
22098       result = result &&
22099           getUnknownFields().equals(other.getUnknownFields());
22100       return result;
22101     }
22102 
22103     private int memoizedHashCode = 0;
22104     @java.lang.Override
hashCode()22105     public int hashCode() {
22106       if (memoizedHashCode != 0) {
22107         return memoizedHashCode;
22108       }
22109       int hash = 41;
22110       hash = (19 * hash) + getDescriptorForType().hashCode();
22111       if (hasRegion()) {
22112         hash = (37 * hash) + REGION_FIELD_NUMBER;
22113         hash = (53 * hash) + getRegion().hashCode();
22114       }
22115       if (getFamilyPathCount() > 0) {
22116         hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER;
22117         hash = (53 * hash) + getFamilyPathList().hashCode();
22118       }
22119       if (hasAssignSeqNum()) {
22120         hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER;
22121         hash = (53 * hash) + hashBoolean(getAssignSeqNum());
22122       }
22123       hash = (29 * hash) + getUnknownFields().hashCode();
22124       memoizedHashCode = hash;
22125       return hash;
22126     }
22127 
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
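
    /*
     * Illustrative only (not generated by protoc): toBuilder() above supports
     * copy-and-modify of an existing request, e.g. (with "original" being a
     * hypothetical BulkLoadHFileRequest instance):
     *
     *   BulkLoadHFileRequest updated = original.toBuilder()
     *       .setAssignSeqNum(false)
     *       .build();
     */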
22194     /**
22195      * Protobuf type {@code BulkLoadHFileRequest}
22196      *
22197      * <pre>
22198      **
22199      * Atomically bulk load multiple HFiles (say from different column families)
22200      * into an open region.
22201      * </pre>
22202      */
22203     public static final class Builder extends
22204         com.google.protobuf.GeneratedMessage.Builder<Builder>
22205        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequestOrBuilder {
22206       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()22207           getDescriptor() {
22208         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
22209       }
22210 
22211       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()22212           internalGetFieldAccessorTable() {
22213         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable
22214             .ensureFieldAccessorsInitialized(
22215                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.Builder.class);
22216       }
22217 
22218       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.newBuilder()
Builder()22219       private Builder() {
22220         maybeForceBuilderInitialization();
22221       }
22222 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)22223       private Builder(
22224           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
22225         super(parent);
22226         maybeForceBuilderInitialization();
22227       }
maybeForceBuilderInitialization()22228       private void maybeForceBuilderInitialization() {
22229         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
22230           getRegionFieldBuilder();
22231           getFamilyPathFieldBuilder();
22232         }
22233       }
create()22234       private static Builder create() {
22235         return new Builder();
22236       }
22237 
clear()22238       public Builder clear() {
22239         super.clear();
22240         if (regionBuilder_ == null) {
22241           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
22242         } else {
22243           regionBuilder_.clear();
22244         }
22245         bitField0_ = (bitField0_ & ~0x00000001);
22246         if (familyPathBuilder_ == null) {
22247           familyPath_ = java.util.Collections.emptyList();
22248           bitField0_ = (bitField0_ & ~0x00000002);
22249         } else {
22250           familyPathBuilder_.clear();
22251         }
22252         assignSeqNum_ = false;
22253         bitField0_ = (bitField0_ & ~0x00000004);
22254         return this;
22255       }
22256 
clone()22257       public Builder clone() {
22258         return create().mergeFrom(buildPartial());
22259       }
22260 
22261       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()22262           getDescriptorForType() {
22263         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
22264       }
22265 
getDefaultInstanceForType()22266       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() {
22267         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
22268       }
22269 
build()22270       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest build() {
22271         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = buildPartial();
22272         if (!result.isInitialized()) {
22273           throw newUninitializedMessageException(result);
22274         }
22275         return result;
22276       }
22277 
buildPartial()22278       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest buildPartial() {
22279         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest(this);
22280         int from_bitField0_ = bitField0_;
22281         int to_bitField0_ = 0;
22282         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
22283           to_bitField0_ |= 0x00000001;
22284         }
22285         if (regionBuilder_ == null) {
22286           result.region_ = region_;
22287         } else {
22288           result.region_ = regionBuilder_.build();
22289         }
22290         if (familyPathBuilder_ == null) {
22291           if (((bitField0_ & 0x00000002) == 0x00000002)) {
22292             familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
22293             bitField0_ = (bitField0_ & ~0x00000002);
22294           }
22295           result.familyPath_ = familyPath_;
22296         } else {
22297           result.familyPath_ = familyPathBuilder_.build();
22298         }
22299         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
22300           to_bitField0_ |= 0x00000002;
22301         }
22302         result.assignSeqNum_ = assignSeqNum_;
22303         result.bitField0_ = to_bitField0_;
22304         onBuilt();
22305         return result;
22306       }
22307 
mergeFrom(com.google.protobuf.Message other)22308       public Builder mergeFrom(com.google.protobuf.Message other) {
22309         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) {
22310           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)other);
22311         } else {
22312           super.mergeFrom(other);
22313           return this;
22314         }
22315       }
22316 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other)22317       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest other) {
22318         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this;
22319         if (other.hasRegion()) {
22320           mergeRegion(other.getRegion());
22321         }
22322         if (familyPathBuilder_ == null) {
22323           if (!other.familyPath_.isEmpty()) {
22324             if (familyPath_.isEmpty()) {
22325               familyPath_ = other.familyPath_;
22326               bitField0_ = (bitField0_ & ~0x00000002);
22327             } else {
22328               ensureFamilyPathIsMutable();
22329               familyPath_.addAll(other.familyPath_);
22330             }
22331             onChanged();
22332           }
22333         } else {
22334           if (!other.familyPath_.isEmpty()) {
22335             if (familyPathBuilder_.isEmpty()) {
22336               familyPathBuilder_.dispose();
22337               familyPathBuilder_ = null;
22338               familyPath_ = other.familyPath_;
22339               bitField0_ = (bitField0_ & ~0x00000002);
22340               familyPathBuilder_ =
22341                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
22342                    getFamilyPathFieldBuilder() : null;
22343             } else {
22344               familyPathBuilder_.addAllMessages(other.familyPath_);
22345             }
22346           }
22347         }
22348         if (other.hasAssignSeqNum()) {
22349           setAssignSeqNum(other.getAssignSeqNum());
22350         }
22351         this.mergeUnknownFields(other.getUnknownFields());
22352         return this;
22353       }
22354 
isInitialized()22355       public final boolean isInitialized() {
22356         if (!hasRegion()) {
22357 
22358           return false;
22359         }
22360         if (!getRegion().isInitialized()) {
22361 
22362           return false;
22363         }
22364         for (int i = 0; i < getFamilyPathCount(); i++) {
22365           if (!getFamilyPath(i).isInitialized()) {
22366 
22367             return false;
22368           }
22369         }
22370         return true;
22371       }
22372 
      public Builder mergeFrom(
22374           com.google.protobuf.CodedInputStream input,
22375           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22376           throws java.io.IOException {
22377         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest parsedMessage = null;
22378         try {
22379           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
22380         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22381           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest) e.getUnfinishedMessage();
22382           throw e;
22383         } finally {
22384           if (parsedMessage != null) {
22385             mergeFrom(parsedMessage);
22386           }
22387         }
22388         return this;
22389       }
22390       private int bitField0_;
22391 
22392       // required .RegionSpecifier region = 1;
22393       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
22394       private com.google.protobuf.SingleFieldBuilder<
22395           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
22396       /**
22397        * <code>required .RegionSpecifier region = 1;</code>
22398        */
      public boolean hasRegion() {
22400         return ((bitField0_ & 0x00000001) == 0x00000001);
22401       }
22402       /**
22403        * <code>required .RegionSpecifier region = 1;</code>
22404        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
22406         if (regionBuilder_ == null) {
22407           return region_;
22408         } else {
22409           return regionBuilder_.getMessage();
22410         }
22411       }
22412       /**
22413        * <code>required .RegionSpecifier region = 1;</code>
22414        */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
22416         if (regionBuilder_ == null) {
22417           if (value == null) {
22418             throw new NullPointerException();
22419           }
22420           region_ = value;
22421           onChanged();
22422         } else {
22423           regionBuilder_.setMessage(value);
22424         }
22425         bitField0_ |= 0x00000001;
22426         return this;
22427       }
22428       /**
22429        * <code>required .RegionSpecifier region = 1;</code>
22430        */
      public Builder setRegion(
22432           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
22433         if (regionBuilder_ == null) {
22434           region_ = builderForValue.build();
22435           onChanged();
22436         } else {
22437           regionBuilder_.setMessage(builderForValue.build());
22438         }
22439         bitField0_ |= 0x00000001;
22440         return this;
22441       }
22442       /**
22443        * <code>required .RegionSpecifier region = 1;</code>
22444        */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
22446         if (regionBuilder_ == null) {
22447           if (((bitField0_ & 0x00000001) == 0x00000001) &&
22448               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
22449             region_ =
22450               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
22451           } else {
22452             region_ = value;
22453           }
22454           onChanged();
22455         } else {
22456           regionBuilder_.mergeFrom(value);
22457         }
22458         bitField0_ |= 0x00000001;
22459         return this;
22460       }
22461       /**
22462        * <code>required .RegionSpecifier region = 1;</code>
22463        */
      public Builder clearRegion() {
22465         if (regionBuilder_ == null) {
22466           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
22467           onChanged();
22468         } else {
22469           regionBuilder_.clear();
22470         }
22471         bitField0_ = (bitField0_ & ~0x00000001);
22472         return this;
22473       }
22474       /**
22475        * <code>required .RegionSpecifier region = 1;</code>
22476        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
22478         bitField0_ |= 0x00000001;
22479         onChanged();
22480         return getRegionFieldBuilder().getBuilder();
22481       }
22482       /**
22483        * <code>required .RegionSpecifier region = 1;</code>
22484        */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
22486         if (regionBuilder_ != null) {
22487           return regionBuilder_.getMessageOrBuilder();
22488         } else {
22489           return region_;
22490         }
22491       }
22492       /**
22493        * <code>required .RegionSpecifier region = 1;</code>
22494        */
22495       private com.google.protobuf.SingleFieldBuilder<
22496           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
          getRegionFieldBuilder() {
22498         if (regionBuilder_ == null) {
22499           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
22500               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
22501                   region_,
22502                   getParentForChildren(),
22503                   isClean());
22504           region_ = null;
22505         }
22506         return regionBuilder_;
22507       }
22508 
22509       // repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;
22510       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
22511         java.util.Collections.emptyList();
      private void ensureFamilyPathIsMutable() {
22513         if (!((bitField0_ & 0x00000002) == 0x00000002)) {
22514           familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_);
22515           bitField0_ |= 0x00000002;
22516          }
22517       }
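      // The repeated family_path field uses a copy-on-write pattern: familyPath_
      // starts out pointing at an immutable shared list, and the first mutating
      // call goes through ensureFamilyPathIsMutable(), which copies the contents
      // into a private ArrayList and sets the 0x00000002 bit so buildPartial()
      // knows the list must still be wrapped with Collections.unmodifiableList()
      // when the message is assembled.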
22518 
22519       private com.google.protobuf.RepeatedFieldBuilder<
22520           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;
22521 
22522       /**
22523        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22524        */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
22526         if (familyPathBuilder_ == null) {
22527           return java.util.Collections.unmodifiableList(familyPath_);
22528         } else {
22529           return familyPathBuilder_.getMessageList();
22530         }
22531       }
22532       /**
22533        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22534        */
      public int getFamilyPathCount() {
22536         if (familyPathBuilder_ == null) {
22537           return familyPath_.size();
22538         } else {
22539           return familyPathBuilder_.getCount();
22540         }
22541       }
22542       /**
22543        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22544        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
22546         if (familyPathBuilder_ == null) {
22547           return familyPath_.get(index);
22548         } else {
22549           return familyPathBuilder_.getMessage(index);
22550         }
22551       }
22552       /**
22553        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22554        */
      public Builder setFamilyPath(
22556           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
22557         if (familyPathBuilder_ == null) {
22558           if (value == null) {
22559             throw new NullPointerException();
22560           }
22561           ensureFamilyPathIsMutable();
22562           familyPath_.set(index, value);
22563           onChanged();
22564         } else {
22565           familyPathBuilder_.setMessage(index, value);
22566         }
22567         return this;
22568       }
22569       /**
22570        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22571        */
      public Builder setFamilyPath(
22573           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
22574         if (familyPathBuilder_ == null) {
22575           ensureFamilyPathIsMutable();
22576           familyPath_.set(index, builderForValue.build());
22577           onChanged();
22578         } else {
22579           familyPathBuilder_.setMessage(index, builderForValue.build());
22580         }
22581         return this;
22582       }
22583       /**
22584        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22585        */
      public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
22587         if (familyPathBuilder_ == null) {
22588           if (value == null) {
22589             throw new NullPointerException();
22590           }
22591           ensureFamilyPathIsMutable();
22592           familyPath_.add(value);
22593           onChanged();
22594         } else {
22595           familyPathBuilder_.addMessage(value);
22596         }
22597         return this;
22598       }
22599       /**
22600        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22601        */
      public Builder addFamilyPath(
22603           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
22604         if (familyPathBuilder_ == null) {
22605           if (value == null) {
22606             throw new NullPointerException();
22607           }
22608           ensureFamilyPathIsMutable();
22609           familyPath_.add(index, value);
22610           onChanged();
22611         } else {
22612           familyPathBuilder_.addMessage(index, value);
22613         }
22614         return this;
22615       }
22616       /**
22617        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22618        */
      public Builder addFamilyPath(
22620           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
22621         if (familyPathBuilder_ == null) {
22622           ensureFamilyPathIsMutable();
22623           familyPath_.add(builderForValue.build());
22624           onChanged();
22625         } else {
22626           familyPathBuilder_.addMessage(builderForValue.build());
22627         }
22628         return this;
22629       }
22630       /**
22631        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22632        */
      public Builder addFamilyPath(
22634           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
22635         if (familyPathBuilder_ == null) {
22636           ensureFamilyPathIsMutable();
22637           familyPath_.add(index, builderForValue.build());
22638           onChanged();
22639         } else {
22640           familyPathBuilder_.addMessage(index, builderForValue.build());
22641         }
22642         return this;
22643       }
22644       /**
22645        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22646        */
      public Builder addAllFamilyPath(
22648           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
22649         if (familyPathBuilder_ == null) {
22650           ensureFamilyPathIsMutable();
22651           super.addAll(values, familyPath_);
22652           onChanged();
22653         } else {
22654           familyPathBuilder_.addAllMessages(values);
22655         }
22656         return this;
22657       }
22658       /**
22659        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22660        */
      public Builder clearFamilyPath() {
22662         if (familyPathBuilder_ == null) {
22663           familyPath_ = java.util.Collections.emptyList();
22664           bitField0_ = (bitField0_ & ~0x00000002);
22665           onChanged();
22666         } else {
22667           familyPathBuilder_.clear();
22668         }
22669         return this;
22670       }
22671       /**
22672        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22673        */
      public Builder removeFamilyPath(int index) {
22675         if (familyPathBuilder_ == null) {
22676           ensureFamilyPathIsMutable();
22677           familyPath_.remove(index);
22678           onChanged();
22679         } else {
22680           familyPathBuilder_.remove(index);
22681         }
22682         return this;
22683       }
22684       /**
22685        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22686        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
22688           int index) {
22689         return getFamilyPathFieldBuilder().getBuilder(index);
22690       }
22691       /**
22692        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22693        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
22695           int index) {
22696         if (familyPathBuilder_ == null) {
22697           return familyPath_.get(index);  } else {
22698           return familyPathBuilder_.getMessageOrBuilder(index);
22699         }
22700       }
22701       /**
22702        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22703        */
22704       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
           getFamilyPathOrBuilderList() {
22706         if (familyPathBuilder_ != null) {
22707           return familyPathBuilder_.getMessageOrBuilderList();
22708         } else {
22709           return java.util.Collections.unmodifiableList(familyPath_);
22710         }
22711       }
22712       /**
22713        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22714        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
22716         return getFamilyPathFieldBuilder().addBuilder(
22717             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
22718       }
22719       /**
22720        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22721        */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
22723           int index) {
22724         return getFamilyPathFieldBuilder().addBuilder(
22725             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
22726       }
22727       /**
22728        * <code>repeated .BulkLoadHFileRequest.FamilyPath family_path = 2;</code>
22729        */
22730       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder>
           getFamilyPathBuilderList() {
22732         return getFamilyPathFieldBuilder().getBuilderList();
22733       }
22734       private com.google.protobuf.RepeatedFieldBuilder<
22735           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
          getFamilyPathFieldBuilder() {
22737         if (familyPathBuilder_ == null) {
22738           familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
22739               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>(
22740                   familyPath_,
22741                   ((bitField0_ & 0x00000002) == 0x00000002),
22742                   getParentForChildren(),
22743                   isClean());
22744           familyPath_ = null;
22745         }
22746         return familyPathBuilder_;
22747       }
22748 
22749       // optional bool assign_seq_num = 3;
22750       private boolean assignSeqNum_ ;
22751       /**
22752        * <code>optional bool assign_seq_num = 3;</code>
22753        */
      public boolean hasAssignSeqNum() {
22755         return ((bitField0_ & 0x00000004) == 0x00000004);
22756       }
22757       /**
22758        * <code>optional bool assign_seq_num = 3;</code>
22759        */
      public boolean getAssignSeqNum() {
22761         return assignSeqNum_;
22762       }
22763       /**
22764        * <code>optional bool assign_seq_num = 3;</code>
22765        */
      public Builder setAssignSeqNum(boolean value) {
22767         bitField0_ |= 0x00000004;
22768         assignSeqNum_ = value;
22769         onChanged();
22770         return this;
22771       }
22772       /**
22773        * <code>optional bool assign_seq_num = 3;</code>
22774        */
      public Builder clearAssignSeqNum() {
22776         bitField0_ = (bitField0_ & ~0x00000004);
22777         assignSeqNum_ = false;
22778         onChanged();
22779         return this;
22780       }
22781 
22782       // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest)
22783     }
22784 
22785     static {
22786       defaultInstance = new BulkLoadHFileRequest(true);
      defaultInstance.initFields();
22788     }
22789 
22790     // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest)
22791   }
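
  // Usage sketch: one way a caller could assemble a BulkLoadHFileRequest with the
  // builder generated above. The region name, column family and HFile path are
  // hypothetical placeholders, and the RegionSpecifier and FamilyPath setters are
  // assumed from their message definitions rather than shown in this excerpt.
  private static BulkLoadHFileRequest sketchBulkLoadHFileRequest() {
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region =
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder()
            .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
            .setValue(com.google.protobuf.ByteString.copyFromUtf8("exampleTable,,1234567890"))
            .build();
    return BulkLoadHFileRequest.newBuilder()
        .setRegion(region)
        .addFamilyPath(BulkLoadHFileRequest.FamilyPath.newBuilder()
            .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
            .setPath("/staging/cf/example-hfile")
            .build())
        .setAssignSeqNum(true)
        .build();
  }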
22792 
22793   public interface BulkLoadHFileResponseOrBuilder
22794       extends com.google.protobuf.MessageOrBuilder {
22795 
22796     // required bool loaded = 1;
22797     /**
22798      * <code>required bool loaded = 1;</code>
22799      */
    boolean hasLoaded();
22801     /**
22802      * <code>required bool loaded = 1;</code>
22803      */
    boolean getLoaded();
22805   }
22806   /**
22807    * Protobuf type {@code BulkLoadHFileResponse}
22808    */
22809   public static final class BulkLoadHFileResponse extends
22810       com.google.protobuf.GeneratedMessage
22811       implements BulkLoadHFileResponseOrBuilder {
22812     // Use BulkLoadHFileResponse.newBuilder() to construct.
    private BulkLoadHFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
22814       super(builder);
22815       this.unknownFields = builder.getUnknownFields();
22816     }
    private BulkLoadHFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
22818 
22819     private static final BulkLoadHFileResponse defaultInstance;
    public static BulkLoadHFileResponse getDefaultInstance() {
22821       return defaultInstance;
22822     }
22823 
    public BulkLoadHFileResponse getDefaultInstanceForType() {
22825       return defaultInstance;
22826     }
22827 
22828     private final com.google.protobuf.UnknownFieldSet unknownFields;
22829     @java.lang.Override
22830     public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
22832       return this.unknownFields;
22833     }
    private BulkLoadHFileResponse(
22835         com.google.protobuf.CodedInputStream input,
22836         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22837         throws com.google.protobuf.InvalidProtocolBufferException {
22838       initFields();
22839       int mutable_bitField0_ = 0;
22840       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
22841           com.google.protobuf.UnknownFieldSet.newBuilder();
22842       try {
22843         boolean done = false;
22844         while (!done) {
22845           int tag = input.readTag();
22846           switch (tag) {
22847             case 0:
22848               done = true;
22849               break;
22850             default: {
22851               if (!parseUnknownField(input, unknownFields,
22852                                      extensionRegistry, tag)) {
22853                 done = true;
22854               }
22855               break;
22856             }
22857             case 8: {
22858               bitField0_ |= 0x00000001;
22859               loaded_ = input.readBool();
22860               break;
22861             }
22862           }
22863         }
22864       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
22865         throw e.setUnfinishedMessage(this);
22866       } catch (java.io.IOException e) {
22867         throw new com.google.protobuf.InvalidProtocolBufferException(
22868             e.getMessage()).setUnfinishedMessage(this);
22869       } finally {
22870         this.unknownFields = unknownFields.build();
22871         makeExtensionsImmutable();
22872       }
22873     }
22874     public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
22876       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor;
22877     }
22878 
22879     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
22881       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable
22882           .ensureFieldAccessorsInitialized(
22883               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
22884     }
22885 
22886     public static com.google.protobuf.Parser<BulkLoadHFileResponse> PARSER =
22887         new com.google.protobuf.AbstractParser<BulkLoadHFileResponse>() {
22888       public BulkLoadHFileResponse parsePartialFrom(
22889           com.google.protobuf.CodedInputStream input,
22890           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
22891           throws com.google.protobuf.InvalidProtocolBufferException {
22892         return new BulkLoadHFileResponse(input, extensionRegistry);
22893       }
22894     };
22895 
22896     @java.lang.Override
    public com.google.protobuf.Parser<BulkLoadHFileResponse> getParserForType() {
22898       return PARSER;
22899     }
22900 
22901     private int bitField0_;
22902     // required bool loaded = 1;
22903     public static final int LOADED_FIELD_NUMBER = 1;
22904     private boolean loaded_;
22905     /**
22906      * <code>required bool loaded = 1;</code>
22907      */
    public boolean hasLoaded() {
22909       return ((bitField0_ & 0x00000001) == 0x00000001);
22910     }
22911     /**
22912      * <code>required bool loaded = 1;</code>
22913      */
    public boolean getLoaded() {
22915       return loaded_;
22916     }
22917 
    private void initFields() {
22919       loaded_ = false;
22920     }
22921     private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
22923       byte isInitialized = memoizedIsInitialized;
22924       if (isInitialized != -1) return isInitialized == 1;
22925 
22926       if (!hasLoaded()) {
22927         memoizedIsInitialized = 0;
22928         return false;
22929       }
22930       memoizedIsInitialized = 1;
22931       return true;
22932     }
22933 
    public void writeTo(com.google.protobuf.CodedOutputStream output)
22935                         throws java.io.IOException {
22936       getSerializedSize();
22937       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22938         output.writeBool(1, loaded_);
22939       }
22940       getUnknownFields().writeTo(output);
22941     }
22942 
22943     private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
22945       int size = memoizedSerializedSize;
22946       if (size != -1) return size;
22947 
22948       size = 0;
22949       if (((bitField0_ & 0x00000001) == 0x00000001)) {
22950         size += com.google.protobuf.CodedOutputStream
22951           .computeBoolSize(1, loaded_);
22952       }
22953       size += getUnknownFields().getSerializedSize();
22954       memoizedSerializedSize = size;
22955       return size;
22956     }
22957 
22958     private static final long serialVersionUID = 0L;
22959     @java.lang.Override
    protected java.lang.Object writeReplace()
22961         throws java.io.ObjectStreamException {
22962       return super.writeReplace();
22963     }
22964 
22965     @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
22967       if (obj == this) {
22968        return true;
22969       }
22970       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)) {
22971         return super.equals(obj);
22972       }
22973       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) obj;
22974 
22975       boolean result = true;
22976       result = result && (hasLoaded() == other.hasLoaded());
22977       if (hasLoaded()) {
22978         result = result && (getLoaded()
22979             == other.getLoaded());
22980       }
22981       result = result &&
22982           getUnknownFields().equals(other.getUnknownFields());
22983       return result;
22984     }
22985 
22986     private int memoizedHashCode = 0;
22987     @java.lang.Override
    public int hashCode() {
22989       if (memoizedHashCode != 0) {
22990         return memoizedHashCode;
22991       }
22992       int hash = 41;
22993       hash = (19 * hash) + getDescriptorForType().hashCode();
22994       if (hasLoaded()) {
22995         hash = (37 * hash) + LOADED_FIELD_NUMBER;
22996         hash = (53 * hash) + hashBoolean(getLoaded());
22997       }
22998       hash = (29 * hash) + getUnknownFields().hashCode();
22999       memoizedHashCode = hash;
23000       return hash;
23001     }
23002 
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
23004         com.google.protobuf.ByteString data)
23005         throws com.google.protobuf.InvalidProtocolBufferException {
23006       return PARSER.parseFrom(data);
23007     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
23009         com.google.protobuf.ByteString data,
23010         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23011         throws com.google.protobuf.InvalidProtocolBufferException {
23012       return PARSER.parseFrom(data, extensionRegistry);
23013     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(byte[] data)
23015         throws com.google.protobuf.InvalidProtocolBufferException {
23016       return PARSER.parseFrom(data);
23017     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
23019         byte[] data,
23020         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23021         throws com.google.protobuf.InvalidProtocolBufferException {
23022       return PARSER.parseFrom(data, extensionRegistry);
23023     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input)
23025         throws java.io.IOException {
23026       return PARSER.parseFrom(input);
23027     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
23029         java.io.InputStream input,
23030         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23031         throws java.io.IOException {
23032       return PARSER.parseFrom(input, extensionRegistry);
23033     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input)
23035         throws java.io.IOException {
23036       return PARSER.parseDelimitedFrom(input);
23037     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseDelimitedFrom(
23039         java.io.InputStream input,
23040         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23041         throws java.io.IOException {
23042       return PARSER.parseDelimitedFrom(input, extensionRegistry);
23043     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
23045         com.google.protobuf.CodedInputStream input)
23046         throws java.io.IOException {
23047       return PARSER.parseFrom(input);
23048     }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parseFrom(
23050         com.google.protobuf.CodedInputStream input,
23051         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23052         throws java.io.IOException {
23053       return PARSER.parseFrom(input, extensionRegistry);
23054     }
23055 
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse prototype) {
23059       return newBuilder().mergeFrom(prototype);
23060     }
    public Builder toBuilder() { return newBuilder(this); }
23062 
23063     @java.lang.Override
    protected Builder newBuilderForType(
23065         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23066       Builder builder = new Builder(parent);
23067       return builder;
23068     }
23069     /**
23070      * Protobuf type {@code BulkLoadHFileResponse}
23071      */
23072     public static final class Builder extends
23073         com.google.protobuf.GeneratedMessage.Builder<Builder>
23074        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponseOrBuilder {
23075       public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
23077         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor;
23078       }
23079 
23080       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
23082         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable
23083             .ensureFieldAccessorsInitialized(
23084                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
23085       }
23086 
23087       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.newBuilder()
      private Builder() {
23089         maybeForceBuilderInitialization();
23090       }
23091 
      private Builder(
23093           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23094         super(parent);
23095         maybeForceBuilderInitialization();
23096       }
      private void maybeForceBuilderInitialization() {
23098         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
23099         }
23100       }
      private static Builder create() {
23102         return new Builder();
23103       }
23104 
      public Builder clear() {
23106         super.clear();
23107         loaded_ = false;
23108         bitField0_ = (bitField0_ & ~0x00000001);
23109         return this;
23110       }
23111 
      public Builder clone() {
23113         return create().mergeFrom(buildPartial());
23114       }
23115 
23116       public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
23118         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_BulkLoadHFileResponse_descriptor;
23119       }
23120 
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() {
23122         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
23123       }
23124 
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse build() {
23126         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = buildPartial();
23127         if (!result.isInitialized()) {
23128           throw newUninitializedMessageException(result);
23129         }
23130         return result;
23131       }
23132 
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse buildPartial() {
23134         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse(this);
23135         int from_bitField0_ = bitField0_;
23136         int to_bitField0_ = 0;
23137         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
23138           to_bitField0_ |= 0x00000001;
23139         }
23140         result.loaded_ = loaded_;
23141         result.bitField0_ = to_bitField0_;
23142         onBuilt();
23143         return result;
23144       }
23145 
      public Builder mergeFrom(com.google.protobuf.Message other) {
23147         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) {
23148           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse)other);
23149         } else {
23150           super.mergeFrom(other);
23151           return this;
23152         }
23153       }
23154 
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse other) {
23156         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this;
23157         if (other.hasLoaded()) {
23158           setLoaded(other.getLoaded());
23159         }
23160         this.mergeUnknownFields(other.getUnknownFields());
23161         return this;
23162       }
23163 
      public final boolean isInitialized() {
23165         if (!hasLoaded()) {
23166 
23167           return false;
23168         }
23169         return true;
23170       }
23171 
      public Builder mergeFrom(
23173           com.google.protobuf.CodedInputStream input,
23174           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23175           throws java.io.IOException {
23176         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse parsedMessage = null;
23177         try {
23178           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
23179         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23180           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) e.getUnfinishedMessage();
23181           throw e;
23182         } finally {
23183           if (parsedMessage != null) {
23184             mergeFrom(parsedMessage);
23185           }
23186         }
23187         return this;
23188       }
23189       private int bitField0_;
23190 
23191       // required bool loaded = 1;
23192       private boolean loaded_ ;
23193       /**
23194        * <code>required bool loaded = 1;</code>
23195        */
      public boolean hasLoaded() {
23197         return ((bitField0_ & 0x00000001) == 0x00000001);
23198       }
23199       /**
23200        * <code>required bool loaded = 1;</code>
23201        */
      public boolean getLoaded() {
23203         return loaded_;
23204       }
23205       /**
23206        * <code>required bool loaded = 1;</code>
23207        */
      public Builder setLoaded(boolean value) {
23209         bitField0_ |= 0x00000001;
23210         loaded_ = value;
23211         onChanged();
23212         return this;
23213       }
23214       /**
23215        * <code>required bool loaded = 1;</code>
23216        */
      public Builder clearLoaded() {
23218         bitField0_ = (bitField0_ & ~0x00000001);
23219         loaded_ = false;
23220         onChanged();
23221         return this;
23222       }
23223 
23224       // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse)
23225     }
23226 
23227     static {
23228       defaultInstance = new BulkLoadHFileResponse(true);
      defaultInstance.initFields();
23230     }
23231 
23232     // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse)
23233   }
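
  // Usage sketch: decoding a serialized BulkLoadHFileResponse and reading its only
  // field. The byte[] is assumed to contain a message written by this class; the
  // helper name is illustrative, not part of the generated API.
  private static boolean sketchBulkLoadSucceeded(byte[] serializedResponse)
      throws com.google.protobuf.InvalidProtocolBufferException {
    BulkLoadHFileResponse response = BulkLoadHFileResponse.parseFrom(serializedResponse);
    // loaded is a required field, so it is present in any fully initialized message;
    // hasLoaded() simply guards against a partially built input.
    return response.hasLoaded() && response.getLoaded();
  }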
23234 
23235   public interface CoprocessorServiceCallOrBuilder
23236       extends com.google.protobuf.MessageOrBuilder {
23237 
23238     // required bytes row = 1;
23239     /**
23240      * <code>required bytes row = 1;</code>
23241      */
    boolean hasRow();
23243     /**
23244      * <code>required bytes row = 1;</code>
23245      */
    com.google.protobuf.ByteString getRow();
23247 
23248     // required string service_name = 2;
23249     /**
23250      * <code>required string service_name = 2;</code>
23251      */
    boolean hasServiceName();
23253     /**
23254      * <code>required string service_name = 2;</code>
23255      */
    java.lang.String getServiceName();
23257     /**
23258      * <code>required string service_name = 2;</code>
23259      */
23260     com.google.protobuf.ByteString
        getServiceNameBytes();
23262 
23263     // required string method_name = 3;
23264     /**
23265      * <code>required string method_name = 3;</code>
23266      */
    boolean hasMethodName();
23268     /**
23269      * <code>required string method_name = 3;</code>
23270      */
    java.lang.String getMethodName();
23272     /**
23273      * <code>required string method_name = 3;</code>
23274      */
23275     com.google.protobuf.ByteString
        getMethodNameBytes();
23277 
23278     // required bytes request = 4;
23279     /**
23280      * <code>required bytes request = 4;</code>
23281      */
    boolean hasRequest();
23283     /**
23284      * <code>required bytes request = 4;</code>
23285      */
    com.google.protobuf.ByteString getRequest();
23287   }
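
  // Usage sketch: filling in the four required CoprocessorServiceCall fields via the
  // generated builder. The row key, service name, method name and request payload are
  // hypothetical placeholders; the setters follow the standard pattern protoc emits
  // for required string and bytes fields.
  private static CoprocessorServiceCall sketchCoprocessorServiceCall() {
    return CoprocessorServiceCall.newBuilder()
        .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-0001"))
        .setServiceName("ExampleService")
        .setMethodName("exampleMethod")
        .setRequest(com.google.protobuf.ByteString.EMPTY)
        .build();
  }
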
23288   /**
23289    * Protobuf type {@code CoprocessorServiceCall}
23290    */
23291   public static final class CoprocessorServiceCall extends
23292       com.google.protobuf.GeneratedMessage
23293       implements CoprocessorServiceCallOrBuilder {
23294     // Use CoprocessorServiceCall.newBuilder() to construct.
    private CoprocessorServiceCall(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
23296       super(builder);
23297       this.unknownFields = builder.getUnknownFields();
23298     }
    private CoprocessorServiceCall(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
23300 
23301     private static final CoprocessorServiceCall defaultInstance;
    public static CoprocessorServiceCall getDefaultInstance() {
23303       return defaultInstance;
23304     }
23305 
    public CoprocessorServiceCall getDefaultInstanceForType() {
23307       return defaultInstance;
23308     }
23309 
23310     private final com.google.protobuf.UnknownFieldSet unknownFields;
23311     @java.lang.Override
23312     public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
23314       return this.unknownFields;
23315     }
    private CoprocessorServiceCall(
23317         com.google.protobuf.CodedInputStream input,
23318         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23319         throws com.google.protobuf.InvalidProtocolBufferException {
23320       initFields();
23321       int mutable_bitField0_ = 0;
23322       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
23323           com.google.protobuf.UnknownFieldSet.newBuilder();
23324       try {
23325         boolean done = false;
23326         while (!done) {
23327           int tag = input.readTag();
23328           switch (tag) {
23329             case 0:
23330               done = true;
23331               break;
23332             default: {
23333               if (!parseUnknownField(input, unknownFields,
23334                                      extensionRegistry, tag)) {
23335                 done = true;
23336               }
23337               break;
23338             }
23339             case 10: {
23340               bitField0_ |= 0x00000001;
23341               row_ = input.readBytes();
23342               break;
23343             }
23344             case 18: {
23345               bitField0_ |= 0x00000002;
23346               serviceName_ = input.readBytes();
23347               break;
23348             }
23349             case 26: {
23350               bitField0_ |= 0x00000004;
23351               methodName_ = input.readBytes();
23352               break;
23353             }
23354             case 34: {
23355               bitField0_ |= 0x00000008;
23356               request_ = input.readBytes();
23357               break;
23358             }
23359           }
23360         }
23361       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23362         throw e.setUnfinishedMessage(this);
23363       } catch (java.io.IOException e) {
23364         throw new com.google.protobuf.InvalidProtocolBufferException(
23365             e.getMessage()).setUnfinishedMessage(this);
23366       } finally {
23367         this.unknownFields = unknownFields.build();
23368         makeExtensionsImmutable();
23369       }
23370     }
23371     public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
23373       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor;
23374     }
23375 
23376     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
23378       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable
23379           .ensureFieldAccessorsInitialized(
23380               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
23381     }
23382 
23383     public static com.google.protobuf.Parser<CoprocessorServiceCall> PARSER =
23384         new com.google.protobuf.AbstractParser<CoprocessorServiceCall>() {
23385       public CoprocessorServiceCall parsePartialFrom(
23386           com.google.protobuf.CodedInputStream input,
23387           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23388           throws com.google.protobuf.InvalidProtocolBufferException {
23389         return new CoprocessorServiceCall(input, extensionRegistry);
23390       }
23391     };
23392 
23393     @java.lang.Override
    public com.google.protobuf.Parser<CoprocessorServiceCall> getParserForType() {
23395       return PARSER;
23396     }
23397 
23398     private int bitField0_;
23399     // required bytes row = 1;
23400     public static final int ROW_FIELD_NUMBER = 1;
23401     private com.google.protobuf.ByteString row_;
23402     /**
23403      * <code>required bytes row = 1;</code>
23404      */
    public boolean hasRow() {
23406       return ((bitField0_ & 0x00000001) == 0x00000001);
23407     }
23408     /**
23409      * <code>required bytes row = 1;</code>
23410      */
    public com.google.protobuf.ByteString getRow() {
23412       return row_;
23413     }
23414 
23415     // required string service_name = 2;
23416     public static final int SERVICE_NAME_FIELD_NUMBER = 2;
23417     private java.lang.Object serviceName_;
23418     /**
23419      * <code>required string service_name = 2;</code>
23420      */
    public boolean hasServiceName() {
23422       return ((bitField0_ & 0x00000002) == 0x00000002);
23423     }
23424     /**
23425      * <code>required string service_name = 2;</code>
23426      */
    public java.lang.String getServiceName() {
23428       java.lang.Object ref = serviceName_;
23429       if (ref instanceof java.lang.String) {
23430         return (java.lang.String) ref;
23431       } else {
23432         com.google.protobuf.ByteString bs =
23433             (com.google.protobuf.ByteString) ref;
23434         java.lang.String s = bs.toStringUtf8();
23435         if (bs.isValidUtf8()) {
23436           serviceName_ = s;
23437         }
23438         return s;
23439       }
23440     }
23441     /**
23442      * <code>required string service_name = 2;</code>
23443      */
23444     public com.google.protobuf.ByteString
        getServiceNameBytes() {
23446       java.lang.Object ref = serviceName_;
23447       if (ref instanceof java.lang.String) {
23448         com.google.protobuf.ByteString b =
23449             com.google.protobuf.ByteString.copyFromUtf8(
23450                 (java.lang.String) ref);
23451         serviceName_ = b;
23452         return b;
23453       } else {
23454         return (com.google.protobuf.ByteString) ref;
23455       }
23456     }
23457 
23458     // required string method_name = 3;
23459     public static final int METHOD_NAME_FIELD_NUMBER = 3;
23460     private java.lang.Object methodName_;
23461     /**
23462      * <code>required string method_name = 3;</code>
23463      */
    public boolean hasMethodName() {
23465       return ((bitField0_ & 0x00000004) == 0x00000004);
23466     }
23467     /**
23468      * <code>required string method_name = 3;</code>
23469      */
    public java.lang.String getMethodName() {
23471       java.lang.Object ref = methodName_;
23472       if (ref instanceof java.lang.String) {
23473         return (java.lang.String) ref;
23474       } else {
23475         com.google.protobuf.ByteString bs =
23476             (com.google.protobuf.ByteString) ref;
23477         java.lang.String s = bs.toStringUtf8();
23478         if (bs.isValidUtf8()) {
23479           methodName_ = s;
23480         }
23481         return s;
23482       }
23483     }
23484     /**
23485      * <code>required string method_name = 3;</code>
23486      */
23487     public com.google.protobuf.ByteString
        getMethodNameBytes() {
23489       java.lang.Object ref = methodName_;
23490       if (ref instanceof java.lang.String) {
23491         com.google.protobuf.ByteString b =
23492             com.google.protobuf.ByteString.copyFromUtf8(
23493                 (java.lang.String) ref);
23494         methodName_ = b;
23495         return b;
23496       } else {
23497         return (com.google.protobuf.ByteString) ref;
23498       }
23499     }
23500 
23501     // required bytes request = 4;
23502     public static final int REQUEST_FIELD_NUMBER = 4;
23503     private com.google.protobuf.ByteString request_;
23504     /**
23505      * <code>required bytes request = 4;</code>
23506      */
    public boolean hasRequest() {
23508       return ((bitField0_ & 0x00000008) == 0x00000008);
23509     }
23510     /**
23511      * <code>required bytes request = 4;</code>
23512      */
    public com.google.protobuf.ByteString getRequest() {
23514       return request_;
23515     }
23516 
    private void initFields() {
23518       row_ = com.google.protobuf.ByteString.EMPTY;
23519       serviceName_ = "";
23520       methodName_ = "";
23521       request_ = com.google.protobuf.ByteString.EMPTY;
23522     }
23523     private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
23525       byte isInitialized = memoizedIsInitialized;
23526       if (isInitialized != -1) return isInitialized == 1;
23527 
23528       if (!hasRow()) {
23529         memoizedIsInitialized = 0;
23530         return false;
23531       }
23532       if (!hasServiceName()) {
23533         memoizedIsInitialized = 0;
23534         return false;
23535       }
23536       if (!hasMethodName()) {
23537         memoizedIsInitialized = 0;
23538         return false;
23539       }
23540       if (!hasRequest()) {
23541         memoizedIsInitialized = 0;
23542         return false;
23543       }
23544       memoizedIsInitialized = 1;
23545       return true;
23546     }
23547 
    public void writeTo(com.google.protobuf.CodedOutputStream output)
23549                         throws java.io.IOException {
23550       getSerializedSize();
23551       if (((bitField0_ & 0x00000001) == 0x00000001)) {
23552         output.writeBytes(1, row_);
23553       }
23554       if (((bitField0_ & 0x00000002) == 0x00000002)) {
23555         output.writeBytes(2, getServiceNameBytes());
23556       }
23557       if (((bitField0_ & 0x00000004) == 0x00000004)) {
23558         output.writeBytes(3, getMethodNameBytes());
23559       }
23560       if (((bitField0_ & 0x00000008) == 0x00000008)) {
23561         output.writeBytes(4, request_);
23562       }
23563       getUnknownFields().writeTo(output);
23564     }
23565 
23566     private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
23568       int size = memoizedSerializedSize;
23569       if (size != -1) return size;
23570 
23571       size = 0;
23572       if (((bitField0_ & 0x00000001) == 0x00000001)) {
23573         size += com.google.protobuf.CodedOutputStream
23574           .computeBytesSize(1, row_);
23575       }
23576       if (((bitField0_ & 0x00000002) == 0x00000002)) {
23577         size += com.google.protobuf.CodedOutputStream
23578           .computeBytesSize(2, getServiceNameBytes());
23579       }
23580       if (((bitField0_ & 0x00000004) == 0x00000004)) {
23581         size += com.google.protobuf.CodedOutputStream
23582           .computeBytesSize(3, getMethodNameBytes());
23583       }
23584       if (((bitField0_ & 0x00000008) == 0x00000008)) {
23585         size += com.google.protobuf.CodedOutputStream
23586           .computeBytesSize(4, request_);
23587       }
23588       size += getUnknownFields().getSerializedSize();
23589       memoizedSerializedSize = size;
23590       return size;
23591     }
23592 
23593     private static final long serialVersionUID = 0L;
23594     @java.lang.Override
writeReplace()23595     protected java.lang.Object writeReplace()
23596         throws java.io.ObjectStreamException {
23597       return super.writeReplace();
23598     }
23599 
23600     @java.lang.Override
equals(final java.lang.Object obj)23601     public boolean equals(final java.lang.Object obj) {
23602       if (obj == this) {
23603         return true;
23604       }
23605       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)) {
23606         return super.equals(obj);
23607       }
23608       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) obj;
23609 
23610       boolean result = true;
23611       result = result && (hasRow() == other.hasRow());
23612       if (hasRow()) {
23613         result = result && getRow()
23614             .equals(other.getRow());
23615       }
23616       result = result && (hasServiceName() == other.hasServiceName());
23617       if (hasServiceName()) {
23618         result = result && getServiceName()
23619             .equals(other.getServiceName());
23620       }
23621       result = result && (hasMethodName() == other.hasMethodName());
23622       if (hasMethodName()) {
23623         result = result && getMethodName()
23624             .equals(other.getMethodName());
23625       }
23626       result = result && (hasRequest() == other.hasRequest());
23627       if (hasRequest()) {
23628         result = result && getRequest()
23629             .equals(other.getRequest());
23630       }
23631       result = result &&
23632           getUnknownFields().equals(other.getUnknownFields());
23633       return result;
23634     }
23635 
23636     private int memoizedHashCode = 0;
23637     @java.lang.Override
hashCode()23638     public int hashCode() {
23639       if (memoizedHashCode != 0) {
23640         return memoizedHashCode;
23641       }
23642       int hash = 41;
23643       hash = (19 * hash) + getDescriptorForType().hashCode();
23644       if (hasRow()) {
23645         hash = (37 * hash) + ROW_FIELD_NUMBER;
23646         hash = (53 * hash) + getRow().hashCode();
23647       }
23648       if (hasServiceName()) {
23649         hash = (37 * hash) + SERVICE_NAME_FIELD_NUMBER;
23650         hash = (53 * hash) + getServiceName().hashCode();
23651       }
23652       if (hasMethodName()) {
23653         hash = (37 * hash) + METHOD_NAME_FIELD_NUMBER;
23654         hash = (53 * hash) + getMethodName().hashCode();
23655       }
23656       if (hasRequest()) {
23657         hash = (37 * hash) + REQUEST_FIELD_NUMBER;
23658         hash = (53 * hash) + getRequest().hashCode();
23659       }
23660       hash = (29 * hash) + getUnknownFields().hashCode();
23661       memoizedHashCode = hash;
23662       return hash;
23663     }
23664 
parseFrom( com.google.protobuf.ByteString data)23665     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
23666         com.google.protobuf.ByteString data)
23667         throws com.google.protobuf.InvalidProtocolBufferException {
23668       return PARSER.parseFrom(data);
23669     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23670     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
23671         com.google.protobuf.ByteString data,
23672         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23673         throws com.google.protobuf.InvalidProtocolBufferException {
23674       return PARSER.parseFrom(data, extensionRegistry);
23675     }
parseFrom(byte[] data)23676     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(byte[] data)
23677         throws com.google.protobuf.InvalidProtocolBufferException {
23678       return PARSER.parseFrom(data);
23679     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23680     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
23681         byte[] data,
23682         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23683         throws com.google.protobuf.InvalidProtocolBufferException {
23684       return PARSER.parseFrom(data, extensionRegistry);
23685     }
parseFrom(java.io.InputStream input)23686     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(java.io.InputStream input)
23687         throws java.io.IOException {
23688       return PARSER.parseFrom(input);
23689     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23690     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
23691         java.io.InputStream input,
23692         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23693         throws java.io.IOException {
23694       return PARSER.parseFrom(input, extensionRegistry);
23695     }
parseDelimitedFrom(java.io.InputStream input)23696     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(java.io.InputStream input)
23697         throws java.io.IOException {
23698       return PARSER.parseDelimitedFrom(input);
23699     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23700     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseDelimitedFrom(
23701         java.io.InputStream input,
23702         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23703         throws java.io.IOException {
23704       return PARSER.parseDelimitedFrom(input, extensionRegistry);
23705     }
parseFrom( com.google.protobuf.CodedInputStream input)23706     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
23707         com.google.protobuf.CodedInputStream input)
23708         throws java.io.IOException {
23709       return PARSER.parseFrom(input);
23710     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23711     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parseFrom(
23712         com.google.protobuf.CodedInputStream input,
23713         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23714         throws java.io.IOException {
23715       return PARSER.parseFrom(input, extensionRegistry);
23716     }
23717 
newBuilder()23718     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()23719     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype)23720     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall prototype) {
23721       return newBuilder().mergeFrom(prototype);
23722     }
toBuilder()23723     public Builder toBuilder() { return newBuilder(this); }
23724 
23725     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)23726     protected Builder newBuilderForType(
23727         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23728       Builder builder = new Builder(parent);
23729       return builder;
23730     }
23731     /**
23732      * Protobuf type {@code CoprocessorServiceCall}
23733      */
23734     public static final class Builder extends
23735         com.google.protobuf.GeneratedMessage.Builder<Builder>
23736        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder {
23737       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()23738           getDescriptor() {
23739         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor;
23740       }
23741 
23742       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()23743           internalGetFieldAccessorTable() {
23744         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_fieldAccessorTable
23745             .ensureFieldAccessorsInitialized(
23746                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
23747       }
23748 
23749       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder()
Builder()23750       private Builder() {
23751         maybeForceBuilderInitialization();
23752       }
23753 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)23754       private Builder(
23755           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
23756         super(parent);
23757         maybeForceBuilderInitialization();
23758       }
maybeForceBuilderInitialization()23759       private void maybeForceBuilderInitialization() {
23760         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
23761         }
23762       }
create()23763       private static Builder create() {
23764         return new Builder();
23765       }
23766 
clear()23767       public Builder clear() {
23768         super.clear();
23769         row_ = com.google.protobuf.ByteString.EMPTY;
23770         bitField0_ = (bitField0_ & ~0x00000001);
23771         serviceName_ = "";
23772         bitField0_ = (bitField0_ & ~0x00000002);
23773         methodName_ = "";
23774         bitField0_ = (bitField0_ & ~0x00000004);
23775         request_ = com.google.protobuf.ByteString.EMPTY;
23776         bitField0_ = (bitField0_ & ~0x00000008);
23777         return this;
23778       }
23779 
clone()23780       public Builder clone() {
23781         return create().mergeFrom(buildPartial());
23782       }
23783 
23784       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()23785           getDescriptorForType() {
23786         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceCall_descriptor;
23787       }
23788 
getDefaultInstanceForType()23789       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getDefaultInstanceForType() {
23790         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
23791       }
23792 
build()23793       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall build() {
23794         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = buildPartial();
23795         if (!result.isInitialized()) {
23796           throw newUninitializedMessageException(result);
23797         }
23798         return result;
23799       }
23800 
buildPartial()23801       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall buildPartial() {
23802         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall(this);
23803         int from_bitField0_ = bitField0_;
23804         int to_bitField0_ = 0;
23805         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
23806           to_bitField0_ |= 0x00000001;
23807         }
23808         result.row_ = row_;
23809         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
23810           to_bitField0_ |= 0x00000002;
23811         }
23812         result.serviceName_ = serviceName_;
23813         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
23814           to_bitField0_ |= 0x00000004;
23815         }
23816         result.methodName_ = methodName_;
23817         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
23818           to_bitField0_ |= 0x00000008;
23819         }
23820         result.request_ = request_;
23821         result.bitField0_ = to_bitField0_;
23822         onBuilt();
23823         return result;
23824       }
23825 
mergeFrom(com.google.protobuf.Message other)23826       public Builder mergeFrom(com.google.protobuf.Message other) {
23827         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) {
23828           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall)other);
23829         } else {
23830           super.mergeFrom(other);
23831           return this;
23832         }
23833       }
23834 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other)23835       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall other) {
23836         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) return this;
23837         if (other.hasRow()) {
23838           setRow(other.getRow());
23839         }
23840         if (other.hasServiceName()) {
23841           bitField0_ |= 0x00000002;
23842           serviceName_ = other.serviceName_;
23843           onChanged();
23844         }
23845         if (other.hasMethodName()) {
23846           bitField0_ |= 0x00000004;
23847           methodName_ = other.methodName_;
23848           onChanged();
23849         }
23850         if (other.hasRequest()) {
23851           setRequest(other.getRequest());
23852         }
23853         this.mergeUnknownFields(other.getUnknownFields());
23854         return this;
23855       }
23856 
isInitialized()23857       public final boolean isInitialized() {
23858         if (!hasRow()) {
23859 
23860           return false;
23861         }
23862         if (!hasServiceName()) {
23863 
23864           return false;
23865         }
23866         if (!hasMethodName()) {
23867 
23868           return false;
23869         }
23870         if (!hasRequest()) {
23871 
23872           return false;
23873         }
23874         return true;
23875       }
23876 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)23877       public Builder mergeFrom(
23878           com.google.protobuf.CodedInputStream input,
23879           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
23880           throws java.io.IOException {
23881         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall parsedMessage = null;
23882         try {
23883           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
23884         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
23885           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall) e.getUnfinishedMessage();
23886           throw e;
23887         } finally {
23888           if (parsedMessage != null) {
23889             mergeFrom(parsedMessage);
23890           }
23891         }
23892         return this;
23893       }
23894       private int bitField0_;
23895 
23896       // required bytes row = 1;
23897       private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
23898       /**
23899        * <code>required bytes row = 1;</code>
23900        */
hasRow()23901       public boolean hasRow() {
23902         return ((bitField0_ & 0x00000001) == 0x00000001);
23903       }
23904       /**
23905        * <code>required bytes row = 1;</code>
23906        */
getRow()23907       public com.google.protobuf.ByteString getRow() {
23908         return row_;
23909       }
23910       /**
23911        * <code>required bytes row = 1;</code>
23912        */
setRow(com.google.protobuf.ByteString value)23913       public Builder setRow(com.google.protobuf.ByteString value) {
23914         if (value == null) {
23915           throw new NullPointerException();
23916         }
23917         bitField0_ |= 0x00000001;
23918         row_ = value;
23919         onChanged();
23920         return this;
23921       }
23922       /**
23923        * <code>required bytes row = 1;</code>
23924        */
clearRow()23925       public Builder clearRow() {
23926         bitField0_ = (bitField0_ & ~0x00000001);
23927         row_ = getDefaultInstance().getRow();
23928         onChanged();
23929         return this;
23930       }
23931 
23932       // required string service_name = 2;
23933       private java.lang.Object serviceName_ = "";
23934       /**
23935        * <code>required string service_name = 2;</code>
23936        */
hasServiceName()23937       public boolean hasServiceName() {
23938         return ((bitField0_ & 0x00000002) == 0x00000002);
23939       }
23940       /**
23941        * <code>required string service_name = 2;</code>
23942        */
getServiceName()23943       public java.lang.String getServiceName() {
23944         java.lang.Object ref = serviceName_;
23945         if (!(ref instanceof java.lang.String)) {
23946           java.lang.String s = ((com.google.protobuf.ByteString) ref)
23947               .toStringUtf8();
23948           serviceName_ = s;
23949           return s;
23950         } else {
23951           return (java.lang.String) ref;
23952         }
23953       }
23954       /**
23955        * <code>required string service_name = 2;</code>
23956        */
23957       public com.google.protobuf.ByteString
getServiceNameBytes()23958           getServiceNameBytes() {
23959         java.lang.Object ref = serviceName_;
23960         if (ref instanceof String) {
23961           com.google.protobuf.ByteString b =
23962               com.google.protobuf.ByteString.copyFromUtf8(
23963                   (java.lang.String) ref);
23964           serviceName_ = b;
23965           return b;
23966         } else {
23967           return (com.google.protobuf.ByteString) ref;
23968         }
23969       }
23970       /**
23971        * <code>required string service_name = 2;</code>
23972        */
setServiceName( java.lang.String value)23973       public Builder setServiceName(
23974           java.lang.String value) {
23975         if (value == null) {
23976           throw new NullPointerException();
23977         }
23978         bitField0_ |= 0x00000002;
23979         serviceName_ = value;
23980         onChanged();
23981         return this;
23982       }
23983       /**
23984        * <code>required string service_name = 2;</code>
23985        */
clearServiceName()23986       public Builder clearServiceName() {
23987         bitField0_ = (bitField0_ & ~0x00000002);
23988         serviceName_ = getDefaultInstance().getServiceName();
23989         onChanged();
23990         return this;
23991       }
23992       /**
23993        * <code>required string service_name = 2;</code>
23994        */
setServiceNameBytes( com.google.protobuf.ByteString value)23995       public Builder setServiceNameBytes(
23996           com.google.protobuf.ByteString value) {
23997         if (value == null) {
23998           throw new NullPointerException();
23999         }
24000         bitField0_ |= 0x00000002;
24001         serviceName_ = value;
24002         onChanged();
24003         return this;
24004       }
24005 
24006       // required string method_name = 3;
24007       private java.lang.Object methodName_ = "";
24008       /**
24009        * <code>required string method_name = 3;</code>
24010        */
hasMethodName()24011       public boolean hasMethodName() {
24012         return ((bitField0_ & 0x00000004) == 0x00000004);
24013       }
24014       /**
24015        * <code>required string method_name = 3;</code>
24016        */
getMethodName()24017       public java.lang.String getMethodName() {
24018         java.lang.Object ref = methodName_;
24019         if (!(ref instanceof java.lang.String)) {
24020           java.lang.String s = ((com.google.protobuf.ByteString) ref)
24021               .toStringUtf8();
24022           methodName_ = s;
24023           return s;
24024         } else {
24025           return (java.lang.String) ref;
24026         }
24027       }
24028       /**
24029        * <code>required string method_name = 3;</code>
24030        */
24031       public com.google.protobuf.ByteString
getMethodNameBytes()24032           getMethodNameBytes() {
24033         java.lang.Object ref = methodName_;
24034         if (ref instanceof String) {
24035           com.google.protobuf.ByteString b =
24036               com.google.protobuf.ByteString.copyFromUtf8(
24037                   (java.lang.String) ref);
24038           methodName_ = b;
24039           return b;
24040         } else {
24041           return (com.google.protobuf.ByteString) ref;
24042         }
24043       }
24044       /**
24045        * <code>required string method_name = 3;</code>
24046        */
setMethodName( java.lang.String value)24047       public Builder setMethodName(
24048           java.lang.String value) {
24049         if (value == null) {
24050           throw new NullPointerException();
24051         }
24052         bitField0_ |= 0x00000004;
24053         methodName_ = value;
24054         onChanged();
24055         return this;
24056       }
24057       /**
24058        * <code>required string method_name = 3;</code>
24059        */
clearMethodName()24060       public Builder clearMethodName() {
24061         bitField0_ = (bitField0_ & ~0x00000004);
24062         methodName_ = getDefaultInstance().getMethodName();
24063         onChanged();
24064         return this;
24065       }
24066       /**
24067        * <code>required string method_name = 3;</code>
24068        */
setMethodNameBytes( com.google.protobuf.ByteString value)24069       public Builder setMethodNameBytes(
24070           com.google.protobuf.ByteString value) {
24071         if (value == null) {
24072           throw new NullPointerException();
24073         }
24074         bitField0_ |= 0x00000004;
24075         methodName_ = value;
24076         onChanged();
24077         return this;
24078       }
24079 
24080       // required bytes request = 4;
24081       private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
24082       /**
24083        * <code>required bytes request = 4;</code>
24084        */
hasRequest()24085       public boolean hasRequest() {
24086         return ((bitField0_ & 0x00000008) == 0x00000008);
24087       }
24088       /**
24089        * <code>required bytes request = 4;</code>
24090        */
getRequest()24091       public com.google.protobuf.ByteString getRequest() {
24092         return request_;
24093       }
24094       /**
24095        * <code>required bytes request = 4;</code>
24096        */
setRequest(com.google.protobuf.ByteString value)24097       public Builder setRequest(com.google.protobuf.ByteString value) {
24098         if (value == null) {
24099           throw new NullPointerException();
24100         }
24101         bitField0_ |= 0x00000008;
24102         request_ = value;
24103         onChanged();
24104         return this;
24105       }
24106       /**
24107        * <code>required bytes request = 4;</code>
24108        */
clearRequest()24109       public Builder clearRequest() {
24110         bitField0_ = (bitField0_ & ~0x00000008);
24111         request_ = getDefaultInstance().getRequest();
24112         onChanged();
24113         return this;
24114       }
24115 
24116       // @@protoc_insertion_point(builder_scope:CoprocessorServiceCall)
24117     }
24118 
24119     static {
24120       defaultInstance = new CoprocessorServiceCall(true);
defaultInstance.initFields()24121       defaultInstance.initFields();
24122     }
24123 
24124     // @@protoc_insertion_point(class_scope:CoprocessorServiceCall)
24125   }
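
  // Editorial usage sketch (not generated by protoc): a minimal example of
  // driving the CoprocessorServiceCall builder above. The row key and the
  // service/method names are placeholders, not values defined in this file.
  static CoprocessorServiceCall exampleCoprocessorServiceCall() {
    return CoprocessorServiceCall.newBuilder()
        .setRow(com.google.protobuf.ByteString.copyFromUtf8("example-row"))
        .setServiceName("ExampleService")   // placeholder coprocessor service
        .setMethodName("exampleMethod")     // placeholder RPC method
        .setRequest(com.google.protobuf.ByteString.EMPTY)
        .build();                           // throws if a required field is unset
  }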
24126 
24127   public interface CoprocessorServiceResultOrBuilder
24128       extends com.google.protobuf.MessageOrBuilder {
24129 
24130     // optional .NameBytesPair value = 1;
24131     /**
24132      * <code>optional .NameBytesPair value = 1;</code>
24133      */
hasValue()24134     boolean hasValue();
24135     /**
24136      * <code>optional .NameBytesPair value = 1;</code>
24137      */
getValue()24138     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
24139     /**
24140      * <code>optional .NameBytesPair value = 1;</code>
24141      */
getValueOrBuilder()24142     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
24143   }
24144   /**
24145    * Protobuf type {@code CoprocessorServiceResult}
24146    */
24147   public static final class CoprocessorServiceResult extends
24148       com.google.protobuf.GeneratedMessage
24149       implements CoprocessorServiceResultOrBuilder {
24150     // Use CoprocessorServiceResult.newBuilder() to construct.
CoprocessorServiceResult(com.google.protobuf.GeneratedMessage.Builder<?> builder)24151     private CoprocessorServiceResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
24152       super(builder);
24153       this.unknownFields = builder.getUnknownFields();
24154     }
CoprocessorServiceResult(boolean noInit)24155     private CoprocessorServiceResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
24156 
24157     private static final CoprocessorServiceResult defaultInstance;
getDefaultInstance()24158     public static CoprocessorServiceResult getDefaultInstance() {
24159       return defaultInstance;
24160     }
24161 
getDefaultInstanceForType()24162     public CoprocessorServiceResult getDefaultInstanceForType() {
24163       return defaultInstance;
24164     }
24165 
24166     private final com.google.protobuf.UnknownFieldSet unknownFields;
24167     @java.lang.Override
24168     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()24169         getUnknownFields() {
24170       return this.unknownFields;
24171     }
CoprocessorServiceResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24172     private CoprocessorServiceResult(
24173         com.google.protobuf.CodedInputStream input,
24174         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24175         throws com.google.protobuf.InvalidProtocolBufferException {
24176       initFields();
24177       int mutable_bitField0_ = 0;
24178       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
24179           com.google.protobuf.UnknownFieldSet.newBuilder();
24180       try {
24181         boolean done = false;
24182         while (!done) {
24183           int tag = input.readTag();
24184           switch (tag) {
24185             case 0:
24186               done = true;
24187               break;
24188             default: {
24189               if (!parseUnknownField(input, unknownFields,
24190                                      extensionRegistry, tag)) {
24191                 done = true;
24192               }
24193               break;
24194             }
24195             case 10: {
24196               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
24197               if (((bitField0_ & 0x00000001) == 0x00000001)) {
24198                 subBuilder = value_.toBuilder();
24199               }
24200               value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
24201               if (subBuilder != null) {
24202                 subBuilder.mergeFrom(value_);
24203                 value_ = subBuilder.buildPartial();
24204               }
24205               bitField0_ |= 0x00000001;
24206               break;
24207             }
24208           }
24209         }
24210       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24211         throw e.setUnfinishedMessage(this);
24212       } catch (java.io.IOException e) {
24213         throw new com.google.protobuf.InvalidProtocolBufferException(
24214             e.getMessage()).setUnfinishedMessage(this);
24215       } finally {
24216         this.unknownFields = unknownFields.build();
24217         makeExtensionsImmutable();
24218       }
24219     }
24220     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()24221         getDescriptor() {
24222       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_descriptor;
24223     }
24224 
24225     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()24226         internalGetFieldAccessorTable() {
24227       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_fieldAccessorTable
24228           .ensureFieldAccessorsInitialized(
24229               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
24230     }
24231 
24232     public static com.google.protobuf.Parser<CoprocessorServiceResult> PARSER =
24233         new com.google.protobuf.AbstractParser<CoprocessorServiceResult>() {
24234       public CoprocessorServiceResult parsePartialFrom(
24235           com.google.protobuf.CodedInputStream input,
24236           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24237           throws com.google.protobuf.InvalidProtocolBufferException {
24238         return new CoprocessorServiceResult(input, extensionRegistry);
24239       }
24240     };
24241 
24242     @java.lang.Override
getParserForType()24243     public com.google.protobuf.Parser<CoprocessorServiceResult> getParserForType() {
24244       return PARSER;
24245     }
24246 
24247     private int bitField0_;
24248     // optional .NameBytesPair value = 1;
24249     public static final int VALUE_FIELD_NUMBER = 1;
24250     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
24251     /**
24252      * <code>optional .NameBytesPair value = 1;</code>
24253      */
hasValue()24254     public boolean hasValue() {
24255       return ((bitField0_ & 0x00000001) == 0x00000001);
24256     }
24257     /**
24258      * <code>optional .NameBytesPair value = 1;</code>
24259      */
getValue()24260     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
24261       return value_;
24262     }
24263     /**
24264      * <code>optional .NameBytesPair value = 1;</code>
24265      */
getValueOrBuilder()24266     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
24267       return value_;
24268     }
24269 
initFields()24270     private void initFields() {
24271       value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24272     }
24273     private byte memoizedIsInitialized = -1;
isInitialized()24274     public final boolean isInitialized() {
24275       byte isInitialized = memoizedIsInitialized;
24276       if (isInitialized != -1) return isInitialized == 1;
24277 
24278       if (hasValue()) {
24279         if (!getValue().isInitialized()) {
24280           memoizedIsInitialized = 0;
24281           return false;
24282         }
24283       }
24284       memoizedIsInitialized = 1;
24285       return true;
24286     }
24287 
writeTo(com.google.protobuf.CodedOutputStream output)24288     public void writeTo(com.google.protobuf.CodedOutputStream output)
24289                         throws java.io.IOException {
24290       getSerializedSize();
24291       if (((bitField0_ & 0x00000001) == 0x00000001)) {
24292         output.writeMessage(1, value_);
24293       }
24294       getUnknownFields().writeTo(output);
24295     }
24296 
24297     private int memoizedSerializedSize = -1;
getSerializedSize()24298     public int getSerializedSize() {
24299       int size = memoizedSerializedSize;
24300       if (size != -1) return size;
24301 
24302       size = 0;
24303       if (((bitField0_ & 0x00000001) == 0x00000001)) {
24304         size += com.google.protobuf.CodedOutputStream
24305           .computeMessageSize(1, value_);
24306       }
24307       size += getUnknownFields().getSerializedSize();
24308       memoizedSerializedSize = size;
24309       return size;
24310     }
24311 
24312     private static final long serialVersionUID = 0L;
24313     @java.lang.Override
writeReplace()24314     protected java.lang.Object writeReplace()
24315         throws java.io.ObjectStreamException {
24316       return super.writeReplace();
24317     }
24318 
24319     @java.lang.Override
equals(final java.lang.Object obj)24320     public boolean equals(final java.lang.Object obj) {
24321       if (obj == this) {
24322         return true;
24323       }
24324       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)) {
24325         return super.equals(obj);
24326       }
24327       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) obj;
24328 
24329       boolean result = true;
24330       result = result && (hasValue() == other.hasValue());
24331       if (hasValue()) {
24332         result = result && getValue()
24333             .equals(other.getValue());
24334       }
24335       result = result &&
24336           getUnknownFields().equals(other.getUnknownFields());
24337       return result;
24338     }
24339 
24340     private int memoizedHashCode = 0;
24341     @java.lang.Override
hashCode()24342     public int hashCode() {
24343       if (memoizedHashCode != 0) {
24344         return memoizedHashCode;
24345       }
24346       int hash = 41;
24347       hash = (19 * hash) + getDescriptorForType().hashCode();
24348       if (hasValue()) {
24349         hash = (37 * hash) + VALUE_FIELD_NUMBER;
24350         hash = (53 * hash) + getValue().hashCode();
24351       }
24352       hash = (29 * hash) + getUnknownFields().hashCode();
24353       memoizedHashCode = hash;
24354       return hash;
24355     }
24356 
parseFrom( com.google.protobuf.ByteString data)24357     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
24358         com.google.protobuf.ByteString data)
24359         throws com.google.protobuf.InvalidProtocolBufferException {
24360       return PARSER.parseFrom(data);
24361     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24362     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
24363         com.google.protobuf.ByteString data,
24364         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24365         throws com.google.protobuf.InvalidProtocolBufferException {
24366       return PARSER.parseFrom(data, extensionRegistry);
24367     }
parseFrom(byte[] data)24368     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(byte[] data)
24369         throws com.google.protobuf.InvalidProtocolBufferException {
24370       return PARSER.parseFrom(data);
24371     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24372     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
24373         byte[] data,
24374         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24375         throws com.google.protobuf.InvalidProtocolBufferException {
24376       return PARSER.parseFrom(data, extensionRegistry);
24377     }
parseFrom(java.io.InputStream input)24378     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(java.io.InputStream input)
24379         throws java.io.IOException {
24380       return PARSER.parseFrom(input);
24381     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24382     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
24383         java.io.InputStream input,
24384         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24385         throws java.io.IOException {
24386       return PARSER.parseFrom(input, extensionRegistry);
24387     }
parseDelimitedFrom(java.io.InputStream input)24388     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(java.io.InputStream input)
24389         throws java.io.IOException {
24390       return PARSER.parseDelimitedFrom(input);
24391     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24392     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseDelimitedFrom(
24393         java.io.InputStream input,
24394         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24395         throws java.io.IOException {
24396       return PARSER.parseDelimitedFrom(input, extensionRegistry);
24397     }
parseFrom( com.google.protobuf.CodedInputStream input)24398     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
24399         com.google.protobuf.CodedInputStream input)
24400         throws java.io.IOException {
24401       return PARSER.parseFrom(input);
24402     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24403     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parseFrom(
24404         com.google.protobuf.CodedInputStream input,
24405         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24406         throws java.io.IOException {
24407       return PARSER.parseFrom(input, extensionRegistry);
24408     }
24409 
newBuilder()24410     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()24411     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype)24412     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult prototype) {
24413       return newBuilder().mergeFrom(prototype);
24414     }
toBuilder()24415     public Builder toBuilder() { return newBuilder(this); }
24416 
24417     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)24418     protected Builder newBuilderForType(
24419         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24420       Builder builder = new Builder(parent);
24421       return builder;
24422     }
24423     /**
24424      * Protobuf type {@code CoprocessorServiceResult}
24425      */
24426     public static final class Builder extends
24427         com.google.protobuf.GeneratedMessage.Builder<Builder>
24428        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder {
24429       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()24430           getDescriptor() {
24431         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_descriptor;
24432       }
24433 
24434       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()24435           internalGetFieldAccessorTable() {
24436         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_fieldAccessorTable
24437             .ensureFieldAccessorsInitialized(
24438                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder.class);
24439       }
24440 
24441       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder()
Builder()24442       private Builder() {
24443         maybeForceBuilderInitialization();
24444       }
24445 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)24446       private Builder(
24447           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
24448         super(parent);
24449         maybeForceBuilderInitialization();
24450       }
maybeForceBuilderInitialization()24451       private void maybeForceBuilderInitialization() {
24452         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
24453           getValueFieldBuilder();
24454         }
24455       }
create()24456       private static Builder create() {
24457         return new Builder();
24458       }
24459 
clear()24460       public Builder clear() {
24461         super.clear();
24462         if (valueBuilder_ == null) {
24463           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24464         } else {
24465           valueBuilder_.clear();
24466         }
24467         bitField0_ = (bitField0_ & ~0x00000001);
24468         return this;
24469       }
24470 
clone()24471       public Builder clone() {
24472         return create().mergeFrom(buildPartial());
24473       }
24474 
24475       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()24476           getDescriptorForType() {
24477         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResult_descriptor;
24478       }
24479 
getDefaultInstanceForType()24480       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getDefaultInstanceForType() {
24481         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
24482       }
24483 
build()24484       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult build() {
24485         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = buildPartial();
24486         if (!result.isInitialized()) {
24487           throw newUninitializedMessageException(result);
24488         }
24489         return result;
24490       }
24491 
buildPartial()24492       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult buildPartial() {
24493         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult(this);
24494         int from_bitField0_ = bitField0_;
24495         int to_bitField0_ = 0;
24496         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
24497           to_bitField0_ |= 0x00000001;
24498         }
24499         if (valueBuilder_ == null) {
24500           result.value_ = value_;
24501         } else {
24502           result.value_ = valueBuilder_.build();
24503         }
24504         result.bitField0_ = to_bitField0_;
24505         onBuilt();
24506         return result;
24507       }
24508 
mergeFrom(com.google.protobuf.Message other)24509       public Builder mergeFrom(com.google.protobuf.Message other) {
24510         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) {
24511           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult)other);
24512         } else {
24513           super.mergeFrom(other);
24514           return this;
24515         }
24516       }
24517 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other)24518       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult other) {
24519         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) return this;
24520         if (other.hasValue()) {
24521           mergeValue(other.getValue());
24522         }
24523         this.mergeUnknownFields(other.getUnknownFields());
24524         return this;
24525       }
24526 
isInitialized()24527       public final boolean isInitialized() {
24528         if (hasValue()) {
24529           if (!getValue().isInitialized()) {
24530 
24531             return false;
24532           }
24533         }
24534         return true;
24535       }
24536 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24537       public Builder mergeFrom(
24538           com.google.protobuf.CodedInputStream input,
24539           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24540           throws java.io.IOException {
24541         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult parsedMessage = null;
24542         try {
24543           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
24544         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24545           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult) e.getUnfinishedMessage();
24546           throw e;
24547         } finally {
24548           if (parsedMessage != null) {
24549             mergeFrom(parsedMessage);
24550           }
24551         }
24552         return this;
24553       }
24554       private int bitField0_;
24555 
24556       // optional .NameBytesPair value = 1;
24557       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24558       private com.google.protobuf.SingleFieldBuilder<
24559           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
24560       /**
24561        * <code>optional .NameBytesPair value = 1;</code>
24562        */
hasValue()24563       public boolean hasValue() {
24564         return ((bitField0_ & 0x00000001) == 0x00000001);
24565       }
24566       /**
24567        * <code>optional .NameBytesPair value = 1;</code>
24568        */
getValue()24569       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
24570         if (valueBuilder_ == null) {
24571           return value_;
24572         } else {
24573           return valueBuilder_.getMessage();
24574         }
24575       }
24576       /**
24577        * <code>optional .NameBytesPair value = 1;</code>
24578        */
setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)24579       public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
24580         if (valueBuilder_ == null) {
24581           if (value == null) {
24582             throw new NullPointerException();
24583           }
24584           value_ = value;
24585           onChanged();
24586         } else {
24587           valueBuilder_.setMessage(value);
24588         }
24589         bitField0_ |= 0x00000001;
24590         return this;
24591       }
24592       /**
24593        * <code>optional .NameBytesPair value = 1;</code>
24594        */
setValue( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)24595       public Builder setValue(
24596           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
24597         if (valueBuilder_ == null) {
24598           value_ = builderForValue.build();
24599           onChanged();
24600         } else {
24601           valueBuilder_.setMessage(builderForValue.build());
24602         }
24603         bitField0_ |= 0x00000001;
24604         return this;
24605       }
24606       /**
24607        * <code>optional .NameBytesPair value = 1;</code>
24608        */
mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)24609       public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
24610         if (valueBuilder_ == null) {
24611           if (((bitField0_ & 0x00000001) == 0x00000001) &&
24612               value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
24613             value_ =
24614               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
24615           } else {
24616             value_ = value;
24617           }
24618           onChanged();
24619         } else {
24620           valueBuilder_.mergeFrom(value);
24621         }
24622         bitField0_ |= 0x00000001;
24623         return this;
24624       }
24625       /**
24626        * <code>optional .NameBytesPair value = 1;</code>
24627        */
clearValue()24628       public Builder clearValue() {
24629         if (valueBuilder_ == null) {
24630           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
24631           onChanged();
24632         } else {
24633           valueBuilder_.clear();
24634         }
24635         bitField0_ = (bitField0_ & ~0x00000001);
24636         return this;
24637       }
24638       /**
24639        * <code>optional .NameBytesPair value = 1;</code>
24640        */
getValueBuilder()24641       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
24642         bitField0_ |= 0x00000001;
24643         onChanged();
24644         return getValueFieldBuilder().getBuilder();
24645       }
24646       /**
24647        * <code>optional .NameBytesPair value = 1;</code>
24648        */
getValueOrBuilder()24649       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
24650         if (valueBuilder_ != null) {
24651           return valueBuilder_.getMessageOrBuilder();
24652         } else {
24653           return value_;
24654         }
24655       }
24656       /**
24657        * <code>optional .NameBytesPair value = 1;</code>
24658        */
24659       private com.google.protobuf.SingleFieldBuilder<
24660           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getValueFieldBuilder()24661           getValueFieldBuilder() {
24662         if (valueBuilder_ == null) {
24663           valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
24664               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
24665                   value_,
24666                   getParentForChildren(),
24667                   isClean());
24668           value_ = null;
24669         }
24670         return valueBuilder_;
24671       }
24672 
24673       // @@protoc_insertion_point(builder_scope:CoprocessorServiceResult)
24674     }
24675 
24676     static {
24677       defaultInstance = new CoprocessorServiceResult(true);
defaultInstance.initFields()24678       defaultInstance.initFields();
24679     }
24680 
24681     // @@protoc_insertion_point(class_scope:CoprocessorServiceResult)
24682   }
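
  // Editorial usage sketch (not generated by protoc): shows a
  // CoprocessorServiceResult surviving a serialize/parse round trip through the
  // static parseFrom helpers above. Assumes HBaseProtos.NameBytesPair exposes a
  // setName setter for its required name field; "out" is a placeholder value.
  static CoprocessorServiceResult exampleCoprocessorServiceResultRoundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    CoprocessorServiceResult result = CoprocessorServiceResult.newBuilder()
        .setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair
            .newBuilder().setName("out"))
        .build();
    // toByteArray() comes from the protobuf runtime; parseFrom delegates to PARSER.
    return CoprocessorServiceResult.parseFrom(result.toByteArray());
  }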
24683 
24684   public interface CoprocessorServiceRequestOrBuilder
24685       extends com.google.protobuf.MessageOrBuilder {
24686 
24687     // required .RegionSpecifier region = 1;
24688     /**
24689      * <code>required .RegionSpecifier region = 1;</code>
24690      */
hasRegion()24691     boolean hasRegion();
24692     /**
24693      * <code>required .RegionSpecifier region = 1;</code>
24694      */
getRegion()24695     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
24696     /**
24697      * <code>required .RegionSpecifier region = 1;</code>
24698      */
getRegionOrBuilder()24699     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
24700 
24701     // required .CoprocessorServiceCall call = 2;
24702     /**
24703      * <code>required .CoprocessorServiceCall call = 2;</code>
24704      */
hasCall()24705     boolean hasCall();
24706     /**
24707      * <code>required .CoprocessorServiceCall call = 2;</code>
24708      */
getCall()24709     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall();
24710     /**
24711      * <code>required .CoprocessorServiceCall call = 2;</code>
24712      */
getCallOrBuilder()24713     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder();
24714   }
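
  // Editorial usage sketch (not generated by protoc): wraps a region specifier
  // and a CoprocessorServiceCall in the CoprocessorServiceRequest message defined
  // below. Assumes the standard generated setRegion/setCall setters and that
  // HBaseProtos.RegionSpecifier carries a RegionSpecifierType plus the region
  // name bytes; the arguments are supplied by the caller.
  static CoprocessorServiceRequest exampleCoprocessorServiceRequest(
      com.google.protobuf.ByteString regionName, CoprocessorServiceCall call) {
    return CoprocessorServiceRequest.newBuilder()
        .setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier
            .newBuilder()
            .setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos
                .RegionSpecifier.RegionSpecifierType.REGION_NAME)
            .setValue(regionName))
        .setCall(call)
        .build();
  }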
24715   /**
24716    * Protobuf type {@code CoprocessorServiceRequest}
24717    */
24718   public static final class CoprocessorServiceRequest extends
24719       com.google.protobuf.GeneratedMessage
24720       implements CoprocessorServiceRequestOrBuilder {
24721     // Use CoprocessorServiceRequest.newBuilder() to construct.
CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)24722     private CoprocessorServiceRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
24723       super(builder);
24724       this.unknownFields = builder.getUnknownFields();
24725     }
CoprocessorServiceRequest(boolean noInit)24726     private CoprocessorServiceRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
24727 
24728     private static final CoprocessorServiceRequest defaultInstance;
getDefaultInstance()24729     public static CoprocessorServiceRequest getDefaultInstance() {
24730       return defaultInstance;
24731     }
24732 
getDefaultInstanceForType()24733     public CoprocessorServiceRequest getDefaultInstanceForType() {
24734       return defaultInstance;
24735     }
24736 
24737     private final com.google.protobuf.UnknownFieldSet unknownFields;
24738     @java.lang.Override
24739     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()24740         getUnknownFields() {
24741       return this.unknownFields;
24742     }
CoprocessorServiceRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24743     private CoprocessorServiceRequest(
24744         com.google.protobuf.CodedInputStream input,
24745         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24746         throws com.google.protobuf.InvalidProtocolBufferException {
24747       initFields();
24748       int mutable_bitField0_ = 0;
24749       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
24750           com.google.protobuf.UnknownFieldSet.newBuilder();
24751       try {
24752         boolean done = false;
24753         while (!done) {
24754           int tag = input.readTag();
24755           switch (tag) {
24756             case 0:
24757               done = true;
24758               break;
24759             default: {
24760               if (!parseUnknownField(input, unknownFields,
24761                                      extensionRegistry, tag)) {
24762                 done = true;
24763               }
24764               break;
24765             }
24766             case 10: {
24767               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
24768               if (((bitField0_ & 0x00000001) == 0x00000001)) {
24769                 subBuilder = region_.toBuilder();
24770               }
24771               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
24772               if (subBuilder != null) {
24773                 subBuilder.mergeFrom(region_);
24774                 region_ = subBuilder.buildPartial();
24775               }
24776               bitField0_ |= 0x00000001;
24777               break;
24778             }
24779             case 18: {
24780               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
24781               if (((bitField0_ & 0x00000002) == 0x00000002)) {
24782                 subBuilder = call_.toBuilder();
24783               }
24784               call_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
24785               if (subBuilder != null) {
24786                 subBuilder.mergeFrom(call_);
24787                 call_ = subBuilder.buildPartial();
24788               }
24789               bitField0_ |= 0x00000002;
24790               break;
24791             }
24792           }
24793         }
24794       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
24795         throw e.setUnfinishedMessage(this);
24796       } catch (java.io.IOException e) {
24797         throw new com.google.protobuf.InvalidProtocolBufferException(
24798             e.getMessage()).setUnfinishedMessage(this);
24799       } finally {
24800         this.unknownFields = unknownFields.build();
24801         makeExtensionsImmutable();
24802       }
24803     }
24804     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()24805         getDescriptor() {
24806       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor;
24807     }
24808 
24809     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()24810         internalGetFieldAccessorTable() {
24811       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable
24812           .ensureFieldAccessorsInitialized(
24813               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
24814     }
24815 
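    // PARSER backs the static parseFrom()/parseDelimitedFrom() overloads below and
    // delegates to the CodedInputStream constructor above.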
24816     public static com.google.protobuf.Parser<CoprocessorServiceRequest> PARSER =
24817         new com.google.protobuf.AbstractParser<CoprocessorServiceRequest>() {
24818       public CoprocessorServiceRequest parsePartialFrom(
24819           com.google.protobuf.CodedInputStream input,
24820           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24821           throws com.google.protobuf.InvalidProtocolBufferException {
24822         return new CoprocessorServiceRequest(input, extensionRegistry);
24823       }
24824     };
24825 
24826     @java.lang.Override
getParserForType()24827     public com.google.protobuf.Parser<CoprocessorServiceRequest> getParserForType() {
24828       return PARSER;
24829     }
24830 
24831     private int bitField0_;
24832     // required .RegionSpecifier region = 1;
24833     public static final int REGION_FIELD_NUMBER = 1;
24834     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
24835     /**
24836      * <code>required .RegionSpecifier region = 1;</code>
24837      */
hasRegion()24838     public boolean hasRegion() {
24839       return ((bitField0_ & 0x00000001) == 0x00000001);
24840     }
24841     /**
24842      * <code>required .RegionSpecifier region = 1;</code>
24843      */
getRegion()24844     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
24845       return region_;
24846     }
24847     /**
24848      * <code>required .RegionSpecifier region = 1;</code>
24849      */
getRegionOrBuilder()24850     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
24851       return region_;
24852     }
24853 
24854     // required .CoprocessorServiceCall call = 2;
24855     public static final int CALL_FIELD_NUMBER = 2;
24856     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_;
24857     /**
24858      * <code>required .CoprocessorServiceCall call = 2;</code>
24859      */
hasCall()24860     public boolean hasCall() {
24861       return ((bitField0_ & 0x00000002) == 0x00000002);
24862     }
24863     /**
24864      * <code>required .CoprocessorServiceCall call = 2;</code>
24865      */
getCall()24866     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
24867       return call_;
24868     }
24869     /**
24870      * <code>required .CoprocessorServiceCall call = 2;</code>
24871      */
getCallOrBuilder()24872     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
24873       return call_;
24874     }
24875 
initFields()24876     private void initFields() {
24877       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
24878       call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
24879     }
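    // Caches the result of isInitialized(): -1 = not yet computed,
    // 0 = a required field is missing or uninitialized, 1 = fully initialized.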
24880     private byte memoizedIsInitialized = -1;
isInitialized()24881     public final boolean isInitialized() {
24882       byte isInitialized = memoizedIsInitialized;
24883       if (isInitialized != -1) return isInitialized == 1;
24884 
24885       if (!hasRegion()) {
24886         memoizedIsInitialized = 0;
24887         return false;
24888       }
24889       if (!hasCall()) {
24890         memoizedIsInitialized = 0;
24891         return false;
24892       }
24893       if (!getRegion().isInitialized()) {
24894         memoizedIsInitialized = 0;
24895         return false;
24896       }
24897       if (!getCall().isInitialized()) {
24898         memoizedIsInitialized = 0;
24899         return false;
24900       }
24901       memoizedIsInitialized = 1;
24902       return true;
24903     }
24904 
writeTo(com.google.protobuf.CodedOutputStream output)24905     public void writeTo(com.google.protobuf.CodedOutputStream output)
24906                         throws java.io.IOException {
24907       getSerializedSize();
24908       if (((bitField0_ & 0x00000001) == 0x00000001)) {
24909         output.writeMessage(1, region_);
24910       }
24911       if (((bitField0_ & 0x00000002) == 0x00000002)) {
24912         output.writeMessage(2, call_);
24913       }
24914       getUnknownFields().writeTo(output);
24915     }
24916 
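    // Caches the wire size of this message; writeTo() calls getSerializedSize()
    // first so the cached value is populated before the fields are written.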
24917     private int memoizedSerializedSize = -1;
getSerializedSize()24918     public int getSerializedSize() {
24919       int size = memoizedSerializedSize;
24920       if (size != -1) return size;
24921 
24922       size = 0;
24923       if (((bitField0_ & 0x00000001) == 0x00000001)) {
24924         size += com.google.protobuf.CodedOutputStream
24925           .computeMessageSize(1, region_);
24926       }
24927       if (((bitField0_ & 0x00000002) == 0x00000002)) {
24928         size += com.google.protobuf.CodedOutputStream
24929           .computeMessageSize(2, call_);
24930       }
24931       size += getUnknownFields().getSerializedSize();
24932       memoizedSerializedSize = size;
24933       return size;
24934     }
24935 
24936     private static final long serialVersionUID = 0L;
24937     @java.lang.Override
writeReplace()24938     protected java.lang.Object writeReplace()
24939         throws java.io.ObjectStreamException {
24940       return super.writeReplace();
24941     }
24942 
24943     @java.lang.Override
equals(final java.lang.Object obj)24944     public boolean equals(final java.lang.Object obj) {
24945       if (obj == this) {
24946        return true;
24947       }
24948       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)) {
24949         return super.equals(obj);
24950       }
24951       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) obj;
24952 
24953       boolean result = true;
24954       result = result && (hasRegion() == other.hasRegion());
24955       if (hasRegion()) {
24956         result = result && getRegion()
24957             .equals(other.getRegion());
24958       }
24959       result = result && (hasCall() == other.hasCall());
24960       if (hasCall()) {
24961         result = result && getCall()
24962             .equals(other.getCall());
24963       }
24964       result = result &&
24965           getUnknownFields().equals(other.getUnknownFields());
24966       return result;
24967     }
24968 
24969     private int memoizedHashCode = 0;
24970     @java.lang.Override
hashCode()24971     public int hashCode() {
24972       if (memoizedHashCode != 0) {
24973         return memoizedHashCode;
24974       }
24975       int hash = 41;
24976       hash = (19 * hash) + getDescriptorForType().hashCode();
24977       if (hasRegion()) {
24978         hash = (37 * hash) + REGION_FIELD_NUMBER;
24979         hash = (53 * hash) + getRegion().hashCode();
24980       }
24981       if (hasCall()) {
24982         hash = (37 * hash) + CALL_FIELD_NUMBER;
24983         hash = (53 * hash) + getCall().hashCode();
24984       }
24985       hash = (29 * hash) + getUnknownFields().hashCode();
24986       memoizedHashCode = hash;
24987       return hash;
24988     }
24989 
parseFrom( com.google.protobuf.ByteString data)24990     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24991         com.google.protobuf.ByteString data)
24992         throws com.google.protobuf.InvalidProtocolBufferException {
24993       return PARSER.parseFrom(data);
24994     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)24995     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
24996         com.google.protobuf.ByteString data,
24997         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
24998         throws com.google.protobuf.InvalidProtocolBufferException {
24999       return PARSER.parseFrom(data, extensionRegistry);
25000     }
parseFrom(byte[] data)25001     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(byte[] data)
25002         throws com.google.protobuf.InvalidProtocolBufferException {
25003       return PARSER.parseFrom(data);
25004     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25005     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
25006         byte[] data,
25007         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25008         throws com.google.protobuf.InvalidProtocolBufferException {
25009       return PARSER.parseFrom(data, extensionRegistry);
25010     }
parseFrom(java.io.InputStream input)25011     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(java.io.InputStream input)
25012         throws java.io.IOException {
25013       return PARSER.parseFrom(input);
25014     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25015     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
25016         java.io.InputStream input,
25017         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25018         throws java.io.IOException {
25019       return PARSER.parseFrom(input, extensionRegistry);
25020     }
parseDelimitedFrom(java.io.InputStream input)25021     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(java.io.InputStream input)
25022         throws java.io.IOException {
25023       return PARSER.parseDelimitedFrom(input);
25024     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25025     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseDelimitedFrom(
25026         java.io.InputStream input,
25027         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25028         throws java.io.IOException {
25029       return PARSER.parseDelimitedFrom(input, extensionRegistry);
25030     }
parseFrom( com.google.protobuf.CodedInputStream input)25031     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
25032         com.google.protobuf.CodedInputStream input)
25033         throws java.io.IOException {
25034       return PARSER.parseFrom(input);
25035     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25036     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parseFrom(
25037         com.google.protobuf.CodedInputStream input,
25038         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25039         throws java.io.IOException {
25040       return PARSER.parseFrom(input, extensionRegistry);
25041     }
25042 
newBuilder()25043     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()25044     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype)25045     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest prototype) {
25046       return newBuilder().mergeFrom(prototype);
25047     }
toBuilder()25048     public Builder toBuilder() { return newBuilder(this); }
25049 
25050     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)25051     protected Builder newBuilderForType(
25052         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25053       Builder builder = new Builder(parent);
25054       return builder;
25055     }
25056     /**
25057      * Protobuf type {@code CoprocessorServiceRequest}
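     *
     * A small sketch of the merge semantics, assuming a and b are two previously
     * built CoprocessorServiceRequest messages; mergeFrom() combines the region
     * and call sub-messages field by field via mergeRegion()/mergeCall() rather
     * than replacing them outright:
     * <pre>
     * CoprocessorServiceRequest merged =
     *     CoprocessorServiceRequest.newBuilder(a).mergeFrom(b).build();
     * </pre>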
25058      */
25059     public static final class Builder extends
25060         com.google.protobuf.GeneratedMessage.Builder<Builder>
25061        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequestOrBuilder {
25062       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()25063           getDescriptor() {
25064         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor;
25065       }
25066 
25067       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()25068           internalGetFieldAccessorTable() {
25069         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_fieldAccessorTable
25070             .ensureFieldAccessorsInitialized(
25071                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
25072       }
25073 
25074       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.newBuilder()
Builder()25075       private Builder() {
25076         maybeForceBuilderInitialization();
25077       }
25078 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)25079       private Builder(
25080           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25081         super(parent);
25082         maybeForceBuilderInitialization();
25083       }
maybeForceBuilderInitialization()25084       private void maybeForceBuilderInitialization() {
25085         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
25086           getRegionFieldBuilder();
25087           getCallFieldBuilder();
25088         }
25089       }
create()25090       private static Builder create() {
25091         return new Builder();
25092       }
25093 
clear()25094       public Builder clear() {
25095         super.clear();
25096         if (regionBuilder_ == null) {
25097           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25098         } else {
25099           regionBuilder_.clear();
25100         }
25101         bitField0_ = (bitField0_ & ~0x00000001);
25102         if (callBuilder_ == null) {
25103           call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
25104         } else {
25105           callBuilder_.clear();
25106         }
25107         bitField0_ = (bitField0_ & ~0x00000002);
25108         return this;
25109       }
25110 
clone()25111       public Builder clone() {
25112         return create().mergeFrom(buildPartial());
25113       }
25114 
25115       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()25116           getDescriptorForType() {
25117         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceRequest_descriptor;
25118       }
25119 
getDefaultInstanceForType()25120       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest getDefaultInstanceForType() {
25121         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
25122       }
25123 
build()25124       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest build() {
25125         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = buildPartial();
25126         if (!result.isInitialized()) {
25127           throw newUninitializedMessageException(result);
25128         }
25129         return result;
25130       }
25131 
buildPartial()25132       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest buildPartial() {
25133         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest(this);
25134         int from_bitField0_ = bitField0_;
25135         int to_bitField0_ = 0;
25136         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
25137           to_bitField0_ |= 0x00000001;
25138         }
25139         if (regionBuilder_ == null) {
25140           result.region_ = region_;
25141         } else {
25142           result.region_ = regionBuilder_.build();
25143         }
25144         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
25145           to_bitField0_ |= 0x00000002;
25146         }
25147         if (callBuilder_ == null) {
25148           result.call_ = call_;
25149         } else {
25150           result.call_ = callBuilder_.build();
25151         }
25152         result.bitField0_ = to_bitField0_;
25153         onBuilt();
25154         return result;
25155       }
25156 
mergeFrom(com.google.protobuf.Message other)25157       public Builder mergeFrom(com.google.protobuf.Message other) {
25158         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) {
25159           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)other);
25160         } else {
25161           super.mergeFrom(other);
25162           return this;
25163         }
25164       }
25165 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other)25166       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest other) {
25167         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance()) return this;
25168         if (other.hasRegion()) {
25169           mergeRegion(other.getRegion());
25170         }
25171         if (other.hasCall()) {
25172           mergeCall(other.getCall());
25173         }
25174         this.mergeUnknownFields(other.getUnknownFields());
25175         return this;
25176       }
25177 
isInitialized()25178       public final boolean isInitialized() {
25179         if (!hasRegion()) {
25180 
25181           return false;
25182         }
25183         if (!hasCall()) {
25184 
25185           return false;
25186         }
25187         if (!getRegion().isInitialized()) {
25188 
25189           return false;
25190         }
25191         if (!getCall().isInitialized()) {
25192 
25193           return false;
25194         }
25195         return true;
25196       }
25197 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25198       public Builder mergeFrom(
25199           com.google.protobuf.CodedInputStream input,
25200           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25201           throws java.io.IOException {
25202         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest parsedMessage = null;
25203         try {
25204           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
25205         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25206           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest) e.getUnfinishedMessage();
25207           throw e;
25208         } finally {
25209           if (parsedMessage != null) {
25210             mergeFrom(parsedMessage);
25211           }
25212         }
25213         return this;
25214       }
25215       private int bitField0_;
25216 
25217       // required .RegionSpecifier region = 1;
25218       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25219       private com.google.protobuf.SingleFieldBuilder<
25220           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
25221       /**
25222        * <code>required .RegionSpecifier region = 1;</code>
25223        */
hasRegion()25224       public boolean hasRegion() {
25225         return ((bitField0_ & 0x00000001) == 0x00000001);
25226       }
25227       /**
25228        * <code>required .RegionSpecifier region = 1;</code>
25229        */
getRegion()25230       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
25231         if (regionBuilder_ == null) {
25232           return region_;
25233         } else {
25234           return regionBuilder_.getMessage();
25235         }
25236       }
25237       /**
25238        * <code>required .RegionSpecifier region = 1;</code>
25239        */
setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)25240       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
25241         if (regionBuilder_ == null) {
25242           if (value == null) {
25243             throw new NullPointerException();
25244           }
25245           region_ = value;
25246           onChanged();
25247         } else {
25248           regionBuilder_.setMessage(value);
25249         }
25250         bitField0_ |= 0x00000001;
25251         return this;
25252       }
25253       /**
25254        * <code>required .RegionSpecifier region = 1;</code>
25255        */
setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)25256       public Builder setRegion(
25257           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
25258         if (regionBuilder_ == null) {
25259           region_ = builderForValue.build();
25260           onChanged();
25261         } else {
25262           regionBuilder_.setMessage(builderForValue.build());
25263         }
25264         bitField0_ |= 0x00000001;
25265         return this;
25266       }
25267       /**
25268        * <code>required .RegionSpecifier region = 1;</code>
25269        */
mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)25270       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
25271         if (regionBuilder_ == null) {
25272           if (((bitField0_ & 0x00000001) == 0x00000001) &&
25273               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
25274             region_ =
25275               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
25276           } else {
25277             region_ = value;
25278           }
25279           onChanged();
25280         } else {
25281           regionBuilder_.mergeFrom(value);
25282         }
25283         bitField0_ |= 0x00000001;
25284         return this;
25285       }
25286       /**
25287        * <code>required .RegionSpecifier region = 1;</code>
25288        */
clearRegion()25289       public Builder clearRegion() {
25290         if (regionBuilder_ == null) {
25291           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25292           onChanged();
25293         } else {
25294           regionBuilder_.clear();
25295         }
25296         bitField0_ = (bitField0_ & ~0x00000001);
25297         return this;
25298       }
25299       /**
25300        * <code>required .RegionSpecifier region = 1;</code>
25301        */
getRegionBuilder()25302       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
25303         bitField0_ |= 0x00000001;
25304         onChanged();
25305         return getRegionFieldBuilder().getBuilder();
25306       }
25307       /**
25308        * <code>required .RegionSpecifier region = 1;</code>
25309        */
getRegionOrBuilder()25310       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
25311         if (regionBuilder_ != null) {
25312           return regionBuilder_.getMessageOrBuilder();
25313         } else {
25314           return region_;
25315         }
25316       }
25317       /**
25318        * <code>required .RegionSpecifier region = 1;</code>
25319        */
25320       private com.google.protobuf.SingleFieldBuilder<
25321           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
getRegionFieldBuilder()25322           getRegionFieldBuilder() {
25323         if (regionBuilder_ == null) {
25324           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
25325               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
25326                   region_,
25327                   getParentForChildren(),
25328                   isClean());
25329           region_ = null;
25330         }
25331         return regionBuilder_;
25332       }
25333 
25334       // required .CoprocessorServiceCall call = 2;
25335       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
25336       private com.google.protobuf.SingleFieldBuilder<
25337           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> callBuilder_;
25338       /**
25339        * <code>required .CoprocessorServiceCall call = 2;</code>
25340        */
hasCall()25341       public boolean hasCall() {
25342         return ((bitField0_ & 0x00000002) == 0x00000002);
25343       }
25344       /**
25345        * <code>required .CoprocessorServiceCall call = 2;</code>
25346        */
getCall()25347       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getCall() {
25348         if (callBuilder_ == null) {
25349           return call_;
25350         } else {
25351           return callBuilder_.getMessage();
25352         }
25353       }
25354       /**
25355        * <code>required .CoprocessorServiceCall call = 2;</code>
25356        */
setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value)25357       public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
25358         if (callBuilder_ == null) {
25359           if (value == null) {
25360             throw new NullPointerException();
25361           }
25362           call_ = value;
25363           onChanged();
25364         } else {
25365           callBuilder_.setMessage(value);
25366         }
25367         bitField0_ |= 0x00000002;
25368         return this;
25369       }
25370       /**
25371        * <code>required .CoprocessorServiceCall call = 2;</code>
25372        */
setCall( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue)25373       public Builder setCall(
25374           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
25375         if (callBuilder_ == null) {
25376           call_ = builderForValue.build();
25377           onChanged();
25378         } else {
25379           callBuilder_.setMessage(builderForValue.build());
25380         }
25381         bitField0_ |= 0x00000002;
25382         return this;
25383       }
25384       /**
25385        * <code>required .CoprocessorServiceCall call = 2;</code>
25386        */
mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value)25387       public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
25388         if (callBuilder_ == null) {
25389           if (((bitField0_ & 0x00000002) == 0x00000002) &&
25390               call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
25391             call_ =
25392               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(call_).mergeFrom(value).buildPartial();
25393           } else {
25394             call_ = value;
25395           }
25396           onChanged();
25397         } else {
25398           callBuilder_.mergeFrom(value);
25399         }
25400         bitField0_ |= 0x00000002;
25401         return this;
25402       }
25403       /**
25404        * <code>required .CoprocessorServiceCall call = 2;</code>
25405        */
clearCall()25406       public Builder clearCall() {
25407         if (callBuilder_ == null) {
25408           call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
25409           onChanged();
25410         } else {
25411           callBuilder_.clear();
25412         }
25413         bitField0_ = (bitField0_ & ~0x00000002);
25414         return this;
25415       }
25416       /**
25417        * <code>required .CoprocessorServiceCall call = 2;</code>
25418        */
getCallBuilder()25419       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getCallBuilder() {
25420         bitField0_ |= 0x00000002;
25421         onChanged();
25422         return getCallFieldBuilder().getBuilder();
25423       }
25424       /**
25425        * <code>required .CoprocessorServiceCall call = 2;</code>
25426        */
getCallOrBuilder()25427       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getCallOrBuilder() {
25428         if (callBuilder_ != null) {
25429           return callBuilder_.getMessageOrBuilder();
25430         } else {
25431           return call_;
25432         }
25433       }
25434       /**
25435        * <code>required .CoprocessorServiceCall call = 2;</code>
25436        */
25437       private com.google.protobuf.SingleFieldBuilder<
25438           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>
getCallFieldBuilder()25439           getCallFieldBuilder() {
25440         if (callBuilder_ == null) {
25441           callBuilder_ = new com.google.protobuf.SingleFieldBuilder<
25442               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
25443                   call_,
25444                   getParentForChildren(),
25445                   isClean());
25446           call_ = null;
25447         }
25448         return callBuilder_;
25449       }
25450 
25451       // @@protoc_insertion_point(builder_scope:CoprocessorServiceRequest)
25452     }
25453 
25454     static {
25455       defaultInstance = new CoprocessorServiceRequest(true);
defaultInstance.initFields()25456       defaultInstance.initFields();
25457     }
25458 
25459     // @@protoc_insertion_point(class_scope:CoprocessorServiceRequest)
25460   }
25461 
25462   public interface CoprocessorServiceResponseOrBuilder
25463       extends com.google.protobuf.MessageOrBuilder {
25464 
25465     // required .RegionSpecifier region = 1;
25466     /**
25467      * <code>required .RegionSpecifier region = 1;</code>
25468      */
hasRegion()25469     boolean hasRegion();
25470     /**
25471      * <code>required .RegionSpecifier region = 1;</code>
25472      */
getRegion()25473     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
25474     /**
25475      * <code>required .RegionSpecifier region = 1;</code>
25476      */
getRegionOrBuilder()25477     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
25478 
25479     // required .NameBytesPair value = 2;
25480     /**
25481      * <code>required .NameBytesPair value = 2;</code>
25482      */
hasValue()25483     boolean hasValue();
25484     /**
25485      * <code>required .NameBytesPair value = 2;</code>
25486      */
getValue()25487     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
25488     /**
25489      * <code>required .NameBytesPair value = 2;</code>
25490      */
getValueOrBuilder()25491     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
25492   }
25493   /**
25494    * Protobuf type {@code CoprocessorServiceResponse}
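   *
   * A minimal parsing sketch, assuming responseBytes holds a serialized response
   * and that NameBytesPair exposes a getValue() accessor for its bytes field as
   * defined in HBase.proto:
   * <pre>
   * CoprocessorServiceResponse response =
   *     CoprocessorServiceResponse.parseFrom(responseBytes);
   * if (response.hasValue()) {
   *   com.google.protobuf.ByteString payload = response.getValue().getValue();
   * }
   * </pre>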
25495    */
25496   public static final class CoprocessorServiceResponse extends
25497       com.google.protobuf.GeneratedMessage
25498       implements CoprocessorServiceResponseOrBuilder {
25499     // Use CoprocessorServiceResponse.newBuilder() to construct.
CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)25500     private CoprocessorServiceResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
25501       super(builder);
25502       this.unknownFields = builder.getUnknownFields();
25503     }
CoprocessorServiceResponse(boolean noInit)25504     private CoprocessorServiceResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
25505 
25506     private static final CoprocessorServiceResponse defaultInstance;
getDefaultInstance()25507     public static CoprocessorServiceResponse getDefaultInstance() {
25508       return defaultInstance;
25509     }
25510 
getDefaultInstanceForType()25511     public CoprocessorServiceResponse getDefaultInstanceForType() {
25512       return defaultInstance;
25513     }
25514 
25515     private final com.google.protobuf.UnknownFieldSet unknownFields;
25516     @java.lang.Override
25517     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()25518         getUnknownFields() {
25519       return this.unknownFields;
25520     }
CoprocessorServiceResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25521     private CoprocessorServiceResponse(
25522         com.google.protobuf.CodedInputStream input,
25523         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25524         throws com.google.protobuf.InvalidProtocolBufferException {
25525       initFields();
25526       int mutable_bitField0_ = 0;
25527       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
25528           com.google.protobuf.UnknownFieldSet.newBuilder();
25529       try {
25530         boolean done = false;
25531         while (!done) {
25532           int tag = input.readTag();
25533           switch (tag) {
25534             case 0:
25535               done = true;
25536               break;
25537             default: {
25538               if (!parseUnknownField(input, unknownFields,
25539                                      extensionRegistry, tag)) {
25540                 done = true;
25541               }
25542               break;
25543             }
25544             case 10: {
25545               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
25546               if (((bitField0_ & 0x00000001) == 0x00000001)) {
25547                 subBuilder = region_.toBuilder();
25548               }
25549               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
25550               if (subBuilder != null) {
25551                 subBuilder.mergeFrom(region_);
25552                 region_ = subBuilder.buildPartial();
25553               }
25554               bitField0_ |= 0x00000001;
25555               break;
25556             }
25557             case 18: {
25558               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
25559               if (((bitField0_ & 0x00000002) == 0x00000002)) {
25560                 subBuilder = value_.toBuilder();
25561               }
25562               value_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
25563               if (subBuilder != null) {
25564                 subBuilder.mergeFrom(value_);
25565                 value_ = subBuilder.buildPartial();
25566               }
25567               bitField0_ |= 0x00000002;
25568               break;
25569             }
25570           }
25571         }
25572       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25573         throw e.setUnfinishedMessage(this);
25574       } catch (java.io.IOException e) {
25575         throw new com.google.protobuf.InvalidProtocolBufferException(
25576             e.getMessage()).setUnfinishedMessage(this);
25577       } finally {
25578         this.unknownFields = unknownFields.build();
25579         makeExtensionsImmutable();
25580       }
25581     }
25582     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()25583         getDescriptor() {
25584       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor;
25585     }
25586 
25587     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()25588         internalGetFieldAccessorTable() {
25589       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable
25590           .ensureFieldAccessorsInitialized(
25591               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
25592     }
25593 
25594     public static com.google.protobuf.Parser<CoprocessorServiceResponse> PARSER =
25595         new com.google.protobuf.AbstractParser<CoprocessorServiceResponse>() {
25596       public CoprocessorServiceResponse parsePartialFrom(
25597           com.google.protobuf.CodedInputStream input,
25598           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25599           throws com.google.protobuf.InvalidProtocolBufferException {
25600         return new CoprocessorServiceResponse(input, extensionRegistry);
25601       }
25602     };
25603 
25604     @java.lang.Override
getParserForType()25605     public com.google.protobuf.Parser<CoprocessorServiceResponse> getParserForType() {
25606       return PARSER;
25607     }
25608 
25609     private int bitField0_;
25610     // required .RegionSpecifier region = 1;
25611     public static final int REGION_FIELD_NUMBER = 1;
25612     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
25613     /**
25614      * <code>required .RegionSpecifier region = 1;</code>
25615      */
hasRegion()25616     public boolean hasRegion() {
25617       return ((bitField0_ & 0x00000001) == 0x00000001);
25618     }
25619     /**
25620      * <code>required .RegionSpecifier region = 1;</code>
25621      */
getRegion()25622     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
25623       return region_;
25624     }
25625     /**
25626      * <code>required .RegionSpecifier region = 1;</code>
25627      */
getRegionOrBuilder()25628     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
25629       return region_;
25630     }
25631 
25632     // required .NameBytesPair value = 2;
25633     public static final int VALUE_FIELD_NUMBER = 2;
25634     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
25635     /**
25636      * <code>required .NameBytesPair value = 2;</code>
25637      */
hasValue()25638     public boolean hasValue() {
25639       return ((bitField0_ & 0x00000002) == 0x00000002);
25640     }
25641     /**
25642      * <code>required .NameBytesPair value = 2;</code>
25643      */
getValue()25644     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
25645       return value_;
25646     }
25647     /**
25648      * <code>required .NameBytesPair value = 2;</code>
25649      */
getValueOrBuilder()25650     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
25651       return value_;
25652     }
25653 
initFields()25654     private void initFields() {
25655       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25656       value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
25657     }
25658     private byte memoizedIsInitialized = -1;
isInitialized()25659     public final boolean isInitialized() {
25660       byte isInitialized = memoizedIsInitialized;
25661       if (isInitialized != -1) return isInitialized == 1;
25662 
25663       if (!hasRegion()) {
25664         memoizedIsInitialized = 0;
25665         return false;
25666       }
25667       if (!hasValue()) {
25668         memoizedIsInitialized = 0;
25669         return false;
25670       }
25671       if (!getRegion().isInitialized()) {
25672         memoizedIsInitialized = 0;
25673         return false;
25674       }
25675       if (!getValue().isInitialized()) {
25676         memoizedIsInitialized = 0;
25677         return false;
25678       }
25679       memoizedIsInitialized = 1;
25680       return true;
25681     }
25682 
writeTo(com.google.protobuf.CodedOutputStream output)25683     public void writeTo(com.google.protobuf.CodedOutputStream output)
25684                         throws java.io.IOException {
25685       getSerializedSize();
25686       if (((bitField0_ & 0x00000001) == 0x00000001)) {
25687         output.writeMessage(1, region_);
25688       }
25689       if (((bitField0_ & 0x00000002) == 0x00000002)) {
25690         output.writeMessage(2, value_);
25691       }
25692       getUnknownFields().writeTo(output);
25693     }
25694 
25695     private int memoizedSerializedSize = -1;
getSerializedSize()25696     public int getSerializedSize() {
25697       int size = memoizedSerializedSize;
25698       if (size != -1) return size;
25699 
25700       size = 0;
25701       if (((bitField0_ & 0x00000001) == 0x00000001)) {
25702         size += com.google.protobuf.CodedOutputStream
25703           .computeMessageSize(1, region_);
25704       }
25705       if (((bitField0_ & 0x00000002) == 0x00000002)) {
25706         size += com.google.protobuf.CodedOutputStream
25707           .computeMessageSize(2, value_);
25708       }
25709       size += getUnknownFields().getSerializedSize();
25710       memoizedSerializedSize = size;
25711       return size;
25712     }
25713 
25714     private static final long serialVersionUID = 0L;
25715     @java.lang.Override
writeReplace()25716     protected java.lang.Object writeReplace()
25717         throws java.io.ObjectStreamException {
25718       return super.writeReplace();
25719     }
25720 
25721     @java.lang.Override
equals(final java.lang.Object obj)25722     public boolean equals(final java.lang.Object obj) {
25723       if (obj == this) {
25724        return true;
25725       }
25726       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)) {
25727         return super.equals(obj);
25728       }
25729       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) obj;
25730 
25731       boolean result = true;
25732       result = result && (hasRegion() == other.hasRegion());
25733       if (hasRegion()) {
25734         result = result && getRegion()
25735             .equals(other.getRegion());
25736       }
25737       result = result && (hasValue() == other.hasValue());
25738       if (hasValue()) {
25739         result = result && getValue()
25740             .equals(other.getValue());
25741       }
25742       result = result &&
25743           getUnknownFields().equals(other.getUnknownFields());
25744       return result;
25745     }
25746 
25747     private int memoizedHashCode = 0;
25748     @java.lang.Override
hashCode()25749     public int hashCode() {
25750       if (memoizedHashCode != 0) {
25751         return memoizedHashCode;
25752       }
25753       int hash = 41;
25754       hash = (19 * hash) + getDescriptorForType().hashCode();
25755       if (hasRegion()) {
25756         hash = (37 * hash) + REGION_FIELD_NUMBER;
25757         hash = (53 * hash) + getRegion().hashCode();
25758       }
25759       if (hasValue()) {
25760         hash = (37 * hash) + VALUE_FIELD_NUMBER;
25761         hash = (53 * hash) + getValue().hashCode();
25762       }
25763       hash = (29 * hash) + getUnknownFields().hashCode();
25764       memoizedHashCode = hash;
25765       return hash;
25766     }
25767 
parseFrom( com.google.protobuf.ByteString data)25768     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
25769         com.google.protobuf.ByteString data)
25770         throws com.google.protobuf.InvalidProtocolBufferException {
25771       return PARSER.parseFrom(data);
25772     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25773     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
25774         com.google.protobuf.ByteString data,
25775         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25776         throws com.google.protobuf.InvalidProtocolBufferException {
25777       return PARSER.parseFrom(data, extensionRegistry);
25778     }
parseFrom(byte[] data)25779     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(byte[] data)
25780         throws com.google.protobuf.InvalidProtocolBufferException {
25781       return PARSER.parseFrom(data);
25782     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25783     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
25784         byte[] data,
25785         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25786         throws com.google.protobuf.InvalidProtocolBufferException {
25787       return PARSER.parseFrom(data, extensionRegistry);
25788     }
parseFrom(java.io.InputStream input)25789     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(java.io.InputStream input)
25790         throws java.io.IOException {
25791       return PARSER.parseFrom(input);
25792     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25793     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
25794         java.io.InputStream input,
25795         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25796         throws java.io.IOException {
25797       return PARSER.parseFrom(input, extensionRegistry);
25798     }
parseDelimitedFrom(java.io.InputStream input)25799     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(java.io.InputStream input)
25800         throws java.io.IOException {
25801       return PARSER.parseDelimitedFrom(input);
25802     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25803     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseDelimitedFrom(
25804         java.io.InputStream input,
25805         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25806         throws java.io.IOException {
25807       return PARSER.parseDelimitedFrom(input, extensionRegistry);
25808     }
parseFrom( com.google.protobuf.CodedInputStream input)25809     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
25810         com.google.protobuf.CodedInputStream input)
25811         throws java.io.IOException {
25812       return PARSER.parseFrom(input);
25813     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)25814     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parseFrom(
25815         com.google.protobuf.CodedInputStream input,
25816         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25817         throws java.io.IOException {
25818       return PARSER.parseFrom(input, extensionRegistry);
25819     }
25820 
newBuilder()25821     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()25822     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype)25823     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse prototype) {
25824       return newBuilder().mergeFrom(prototype);
25825     }
toBuilder()25826     public Builder toBuilder() { return newBuilder(this); }
25827 
25828     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)25829     protected Builder newBuilderForType(
25830         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25831       Builder builder = new Builder(parent);
25832       return builder;
25833     }
25834     /**
25835      * Protobuf type {@code CoprocessorServiceResponse}
25836      */
25837     public static final class Builder extends
25838         com.google.protobuf.GeneratedMessage.Builder<Builder>
25839        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponseOrBuilder {
25840       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()25841           getDescriptor() {
25842         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor;
25843       }
25844 
25845       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()25846           internalGetFieldAccessorTable() {
25847         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_fieldAccessorTable
25848             .ensureFieldAccessorsInitialized(
25849                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
25850       }
25851 
25852       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.newBuilder()
Builder()25853       private Builder() {
25854         maybeForceBuilderInitialization();
25855       }
25856 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)25857       private Builder(
25858           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
25859         super(parent);
25860         maybeForceBuilderInitialization();
25861       }
maybeForceBuilderInitialization()25862       private void maybeForceBuilderInitialization() {
25863         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
25864           getRegionFieldBuilder();
25865           getValueFieldBuilder();
25866         }
25867       }
create()25868       private static Builder create() {
25869         return new Builder();
25870       }
25871 
clear()25872       public Builder clear() {
25873         super.clear();
25874         if (regionBuilder_ == null) {
25875           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25876         } else {
25877           regionBuilder_.clear();
25878         }
25879         bitField0_ = (bitField0_ & ~0x00000001);
25880         if (valueBuilder_ == null) {
25881           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
25882         } else {
25883           valueBuilder_.clear();
25884         }
25885         bitField0_ = (bitField0_ & ~0x00000002);
25886         return this;
25887       }
25888 
clone()25889       public Builder clone() {
25890         return create().mergeFrom(buildPartial());
25891       }
25892 
25893       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()25894           getDescriptorForType() {
25895         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CoprocessorServiceResponse_descriptor;
25896       }
25897 
getDefaultInstanceForType()25898       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse getDefaultInstanceForType() {
25899         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
25900       }
25901 
25902       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse build() {
25903         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = buildPartial();
25904         if (!result.isInitialized()) {
25905           throw newUninitializedMessageException(result);
25906         }
25907         return result;
25908       }
25909 
25910       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse buildPartial() {
25911         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse(this);
25912         int from_bitField0_ = bitField0_;
25913         int to_bitField0_ = 0;
25914         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
25915           to_bitField0_ |= 0x00000001;
25916         }
25917         if (regionBuilder_ == null) {
25918           result.region_ = region_;
25919         } else {
25920           result.region_ = regionBuilder_.build();
25921         }
25922         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
25923           to_bitField0_ |= 0x00000002;
25924         }
25925         if (valueBuilder_ == null) {
25926           result.value_ = value_;
25927         } else {
25928           result.value_ = valueBuilder_.build();
25929         }
25930         result.bitField0_ = to_bitField0_;
25931         onBuilt();
25932         return result;
25933       }
25934 
25935       public Builder mergeFrom(com.google.protobuf.Message other) {
25936         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) {
25937           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse)other);
25938         } else {
25939           super.mergeFrom(other);
25940           return this;
25941         }
25942       }
25943 
25944       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse other) {
25945         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()) return this;
25946         if (other.hasRegion()) {
25947           mergeRegion(other.getRegion());
25948         }
25949         if (other.hasValue()) {
25950           mergeValue(other.getValue());
25951         }
25952         this.mergeUnknownFields(other.getUnknownFields());
25953         return this;
25954       }
25955 
25956       public final boolean isInitialized() {
25957         if (!hasRegion()) {
25958 
25959           return false;
25960         }
25961         if (!hasValue()) {
25962 
25963           return false;
25964         }
25965         if (!getRegion().isInitialized()) {
25966 
25967           return false;
25968         }
25969         if (!getValue().isInitialized()) {
25970 
25971           return false;
25972         }
25973         return true;
25974       }
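      // Editorial usage sketch (not part of the protoc output): both fields of
      // CoprocessorServiceResponse are declared `required`, so the builder's
      // isInitialized() only turns true once setRegion(...) and setValue(...)
      // have been called with fully initialized messages; build() enforces the
      // same check by throwing when it fails.
      //
      //   ClientProtos.CoprocessorServiceResponse.Builder b =
      //       ClientProtos.CoprocessorServiceResponse.newBuilder();
      //   assert !b.isInitialized();   // nothing set yet, build() would throw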
25975 
25976       public Builder mergeFrom(
25977           com.google.protobuf.CodedInputStream input,
25978           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
25979           throws java.io.IOException {
25980         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse parsedMessage = null;
25981         try {
25982           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
25983         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
25984           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) e.getUnfinishedMessage();
25985           throw e;
25986         } finally {
25987           if (parsedMessage != null) {
25988             mergeFrom(parsedMessage);
25989           }
25990         }
25991         return this;
25992       }
25993       private int bitField0_;
25994 
25995       // required .RegionSpecifier region = 1;
25996       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
25997       private com.google.protobuf.SingleFieldBuilder<
25998           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
25999       /**
26000        * <code>required .RegionSpecifier region = 1;</code>
26001        */
26002       public boolean hasRegion() {
26003         return ((bitField0_ & 0x00000001) == 0x00000001);
26004       }
26005       /**
26006        * <code>required .RegionSpecifier region = 1;</code>
26007        */
26008       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
26009         if (regionBuilder_ == null) {
26010           return region_;
26011         } else {
26012           return regionBuilder_.getMessage();
26013         }
26014       }
26015       /**
26016        * <code>required .RegionSpecifier region = 1;</code>
26017        */
26018       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
26019         if (regionBuilder_ == null) {
26020           if (value == null) {
26021             throw new NullPointerException();
26022           }
26023           region_ = value;
26024           onChanged();
26025         } else {
26026           regionBuilder_.setMessage(value);
26027         }
26028         bitField0_ |= 0x00000001;
26029         return this;
26030       }
26031       /**
26032        * <code>required .RegionSpecifier region = 1;</code>
26033        */
26034       public Builder setRegion(
26035           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
26036         if (regionBuilder_ == null) {
26037           region_ = builderForValue.build();
26038           onChanged();
26039         } else {
26040           regionBuilder_.setMessage(builderForValue.build());
26041         }
26042         bitField0_ |= 0x00000001;
26043         return this;
26044       }
26045       /**
26046        * <code>required .RegionSpecifier region = 1;</code>
26047        */
26048       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
26049         if (regionBuilder_ == null) {
26050           if (((bitField0_ & 0x00000001) == 0x00000001) &&
26051               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
26052             region_ =
26053               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
26054           } else {
26055             region_ = value;
26056           }
26057           onChanged();
26058         } else {
26059           regionBuilder_.mergeFrom(value);
26060         }
26061         bitField0_ |= 0x00000001;
26062         return this;
26063       }
26064       /**
26065        * <code>required .RegionSpecifier region = 1;</code>
26066        */
26067       public Builder clearRegion() {
26068         if (regionBuilder_ == null) {
26069           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
26070           onChanged();
26071         } else {
26072           regionBuilder_.clear();
26073         }
26074         bitField0_ = (bitField0_ & ~0x00000001);
26075         return this;
26076       }
26077       /**
26078        * <code>required .RegionSpecifier region = 1;</code>
26079        */
26080       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
26081         bitField0_ |= 0x00000001;
26082         onChanged();
26083         return getRegionFieldBuilder().getBuilder();
26084       }
26085       /**
26086        * <code>required .RegionSpecifier region = 1;</code>
26087        */
26088       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
26089         if (regionBuilder_ != null) {
26090           return regionBuilder_.getMessageOrBuilder();
26091         } else {
26092           return region_;
26093         }
26094       }
26095       /**
26096        * <code>required .RegionSpecifier region = 1;</code>
26097        */
26098       private com.google.protobuf.SingleFieldBuilder<
26099           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
26100           getRegionFieldBuilder() {
26101         if (regionBuilder_ == null) {
26102           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
26103               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
26104                   region_,
26105                   getParentForChildren(),
26106                   isClean());
26107           region_ = null;
26108         }
26109         return regionBuilder_;
26110       }
26111 
26112       // required .NameBytesPair value = 2;
26113       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
26114       private com.google.protobuf.SingleFieldBuilder<
26115           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
26116       /**
26117        * <code>required .NameBytesPair value = 2;</code>
26118        */
26119       public boolean hasValue() {
26120         return ((bitField0_ & 0x00000002) == 0x00000002);
26121       }
26122       /**
26123        * <code>required .NameBytesPair value = 2;</code>
26124        */
26125       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
26126         if (valueBuilder_ == null) {
26127           return value_;
26128         } else {
26129           return valueBuilder_.getMessage();
26130         }
26131       }
26132       /**
26133        * <code>required .NameBytesPair value = 2;</code>
26134        */
26135       public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
26136         if (valueBuilder_ == null) {
26137           if (value == null) {
26138             throw new NullPointerException();
26139           }
26140           value_ = value;
26141           onChanged();
26142         } else {
26143           valueBuilder_.setMessage(value);
26144         }
26145         bitField0_ |= 0x00000002;
26146         return this;
26147       }
26148       /**
26149        * <code>required .NameBytesPair value = 2;</code>
26150        */
26151       public Builder setValue(
26152           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
26153         if (valueBuilder_ == null) {
26154           value_ = builderForValue.build();
26155           onChanged();
26156         } else {
26157           valueBuilder_.setMessage(builderForValue.build());
26158         }
26159         bitField0_ |= 0x00000002;
26160         return this;
26161       }
26162       /**
26163        * <code>required .NameBytesPair value = 2;</code>
26164        */
26165       public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
26166         if (valueBuilder_ == null) {
26167           if (((bitField0_ & 0x00000002) == 0x00000002) &&
26168               value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
26169             value_ =
26170               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
26171           } else {
26172             value_ = value;
26173           }
26174           onChanged();
26175         } else {
26176           valueBuilder_.mergeFrom(value);
26177         }
26178         bitField0_ |= 0x00000002;
26179         return this;
26180       }
26181       /**
26182        * <code>required .NameBytesPair value = 2;</code>
26183        */
26184       public Builder clearValue() {
26185         if (valueBuilder_ == null) {
26186           value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
26187           onChanged();
26188         } else {
26189           valueBuilder_.clear();
26190         }
26191         bitField0_ = (bitField0_ & ~0x00000002);
26192         return this;
26193       }
26194       /**
26195        * <code>required .NameBytesPair value = 2;</code>
26196        */
26197       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
26198         bitField0_ |= 0x00000002;
26199         onChanged();
26200         return getValueFieldBuilder().getBuilder();
26201       }
26202       /**
26203        * <code>required .NameBytesPair value = 2;</code>
26204        */
26205       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
26206         if (valueBuilder_ != null) {
26207           return valueBuilder_.getMessageOrBuilder();
26208         } else {
26209           return value_;
26210         }
26211       }
26212       /**
26213        * <code>required .NameBytesPair value = 2;</code>
26214        */
26215       private com.google.protobuf.SingleFieldBuilder<
26216           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
26217           getValueFieldBuilder() {
26218         if (valueBuilder_ == null) {
26219           valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
26220               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
26221                   value_,
26222                   getParentForChildren(),
26223                   isClean());
26224           value_ = null;
26225         }
26226         return valueBuilder_;
26227       }
26228 
26229       // @@protoc_insertion_point(builder_scope:CoprocessorServiceResponse)
26230     }
26231 
26232     static {
26233       defaultInstance = new CoprocessorServiceResponse(true);
26234       defaultInstance.initFields();
26235     }
26236 
26237     // @@protoc_insertion_point(class_scope:CoprocessorServiceResponse)
26238   }
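  // Editorial usage sketch (not emitted by protoc): assembling a
  // CoprocessorServiceResponse and serializing it. `region` and `value` are
  // assumed to be previously built RegionSpecifier / NameBytesPair messages;
  // build() throws if either required field is missing.
  //
  //   HBaseProtos.RegionSpecifier region = ...;   // assumed available
  //   HBaseProtos.NameBytesPair value = ...;      // assumed available
  //   ClientProtos.CoprocessorServiceResponse resp =
  //       ClientProtos.CoprocessorServiceResponse.newBuilder()
  //           .setRegion(region)
  //           .setValue(value)
  //           .build();
  //   byte[] wire = resp.toByteArray();           // standard protobuf serialization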
26239 
26240   public interface ActionOrBuilder
26241       extends com.google.protobuf.MessageOrBuilder {
26242 
26243     // optional uint32 index = 1;
26244     /**
26245      * <code>optional uint32 index = 1;</code>
26246      *
26247      * <pre>
26248      * If part of a multi action, useful for aligning the
26249      * result with what was originally submitted.
26250      * </pre>
26251      */
26252     boolean hasIndex();
26253     /**
26254      * <code>optional uint32 index = 1;</code>
26255      *
26256      * <pre>
26257      * If part of a multi action, useful for aligning the
26258      * result with what was originally submitted.
26259      * </pre>
26260      */
26261     int getIndex();
26262 
26263     // optional .MutationProto mutation = 2;
26264     /**
26265      * <code>optional .MutationProto mutation = 2;</code>
26266      */
26267     boolean hasMutation();
26268     /**
26269      * <code>optional .MutationProto mutation = 2;</code>
26270      */
26271     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation();
26272     /**
26273      * <code>optional .MutationProto mutation = 2;</code>
26274      */
26275     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder();
26276 
26277     // optional .Get get = 3;
26278     /**
26279      * <code>optional .Get get = 3;</code>
26280      */
26281     boolean hasGet();
26282     /**
26283      * <code>optional .Get get = 3;</code>
26284      */
26285     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
26286     /**
26287      * <code>optional .Get get = 3;</code>
26288      */
26289     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
26290 
26291     // optional .CoprocessorServiceCall service_call = 4;
26292     /**
26293      * <code>optional .CoprocessorServiceCall service_call = 4;</code>
26294      */
26295     boolean hasServiceCall();
26296     /**
26297      * <code>optional .CoprocessorServiceCall service_call = 4;</code>
26298      */
26299     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall();
26300     /**
26301      * <code>optional .CoprocessorServiceCall service_call = 4;</code>
26302      */
26303     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder();
26304   }
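  // Editorial note: ActionOrBuilder is the read-only view implemented by both
  // Action and Action.Builder, so helper code can accept either without
  // forcing a build(). A minimal sketch (the helper name is hypothetical):
  //
  //   static boolean isReadAction(ClientProtos.ActionOrBuilder a) {
  //     return a.hasGet() && !a.hasMutation() && !a.hasServiceCall();
  //   }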
26305   /**
26306    * Protobuf type {@code Action}
26307    *
26308    * <pre>
26309    * Either a Get or a Mutation
26310    * </pre>
26311    */
26312   public static final class Action extends
26313       com.google.protobuf.GeneratedMessage
26314       implements ActionOrBuilder {
26315     // Use Action.newBuilder() to construct.
26316     private Action(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
26317       super(builder);
26318       this.unknownFields = builder.getUnknownFields();
26319     }
26320     private Action(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
26321 
26322     private static final Action defaultInstance;
26323     public static Action getDefaultInstance() {
26324       return defaultInstance;
26325     }
26326 
26327     public Action getDefaultInstanceForType() {
26328       return defaultInstance;
26329     }
26330 
26331     private final com.google.protobuf.UnknownFieldSet unknownFields;
26332     @java.lang.Override
26333     public final com.google.protobuf.UnknownFieldSet
26334         getUnknownFields() {
26335       return this.unknownFields;
26336     }
26337     private Action(
26338         com.google.protobuf.CodedInputStream input,
26339         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26340         throws com.google.protobuf.InvalidProtocolBufferException {
26341       initFields();
26342       int mutable_bitField0_ = 0;
26343       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
26344           com.google.protobuf.UnknownFieldSet.newBuilder();
26345       try {
26346         boolean done = false;
26347         while (!done) {
26348           int tag = input.readTag();
26349           switch (tag) {
26350             case 0:
26351               done = true;
26352               break;
26353             default: {
26354               if (!parseUnknownField(input, unknownFields,
26355                                      extensionRegistry, tag)) {
26356                 done = true;
26357               }
26358               break;
26359             }
26360             case 8: {
26361               bitField0_ |= 0x00000001;
26362               index_ = input.readUInt32();
26363               break;
26364             }
26365             case 18: {
26366               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null;
26367               if (((bitField0_ & 0x00000002) == 0x00000002)) {
26368                 subBuilder = mutation_.toBuilder();
26369               }
26370               mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry);
26371               if (subBuilder != null) {
26372                 subBuilder.mergeFrom(mutation_);
26373                 mutation_ = subBuilder.buildPartial();
26374               }
26375               bitField0_ |= 0x00000002;
26376               break;
26377             }
26378             case 26: {
26379               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
26380               if (((bitField0_ & 0x00000004) == 0x00000004)) {
26381                 subBuilder = get_.toBuilder();
26382               }
26383               get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
26384               if (subBuilder != null) {
26385                 subBuilder.mergeFrom(get_);
26386                 get_ = subBuilder.buildPartial();
26387               }
26388               bitField0_ |= 0x00000004;
26389               break;
26390             }
26391             case 34: {
26392               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder subBuilder = null;
26393               if (((bitField0_ & 0x00000008) == 0x00000008)) {
26394                 subBuilder = serviceCall_.toBuilder();
26395               }
26396               serviceCall_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.PARSER, extensionRegistry);
26397               if (subBuilder != null) {
26398                 subBuilder.mergeFrom(serviceCall_);
26399                 serviceCall_ = subBuilder.buildPartial();
26400               }
26401               bitField0_ |= 0x00000008;
26402               break;
26403             }
26404           }
26405         }
26406       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
26407         throw e.setUnfinishedMessage(this);
26408       } catch (java.io.IOException e) {
26409         throw new com.google.protobuf.InvalidProtocolBufferException(
26410             e.getMessage()).setUnfinishedMessage(this);
26411       } finally {
26412         this.unknownFields = unknownFields.build();
26413         makeExtensionsImmutable();
26414       }
26415     }
26416     public static final com.google.protobuf.Descriptors.Descriptor
26417         getDescriptor() {
26418       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_descriptor;
26419     }
26420 
26421     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
26422         internalGetFieldAccessorTable() {
26423       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_fieldAccessorTable
26424           .ensureFieldAccessorsInitialized(
26425               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
26426     }
26427 
26428     public static com.google.protobuf.Parser<Action> PARSER =
26429         new com.google.protobuf.AbstractParser<Action>() {
26430       public Action parsePartialFrom(
26431           com.google.protobuf.CodedInputStream input,
26432           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26433           throws com.google.protobuf.InvalidProtocolBufferException {
26434         return new Action(input, extensionRegistry);
26435       }
26436     };
26437 
26438     @java.lang.Override
26439     public com.google.protobuf.Parser<Action> getParserForType() {
26440       return PARSER;
26441     }
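    // Editorial usage sketch: the static PARSER decodes an Action from bytes
    // or a CodedInputStream; in the protobuf 2.x runtime this class targets,
    // parseFrom() additionally verifies required fields of nested messages
    // while parsePartialFrom() does not. `wire` is an assumed byte[] holding a
    // serialized Action.
    //
    //   ClientProtos.Action a = ClientProtos.Action.PARSER.parseFrom(wire);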
26442 
26443     private int bitField0_;
26444     // optional uint32 index = 1;
26445     public static final int INDEX_FIELD_NUMBER = 1;
26446     private int index_;
26447     /**
26448      * <code>optional uint32 index = 1;</code>
26449      *
26450      * <pre>
26451      * If part of a multi action, useful for aligning the
26452      * result with what was originally submitted.
26453      * </pre>
26454      */
26455     public boolean hasIndex() {
26456       return ((bitField0_ & 0x00000001) == 0x00000001);
26457     }
26458     /**
26459      * <code>optional uint32 index = 1;</code>
26460      *
26461      * <pre>
26462      * If part of a multi action, useful for aligning the
26463      * result with what was originally submitted.
26464      * </pre>
26465      */
26466     public int getIndex() {
26467       return index_;
26468     }
26469 
26470     // optional .MutationProto mutation = 2;
26471     public static final int MUTATION_FIELD_NUMBER = 2;
26472     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_;
26473     /**
26474      * <code>optional .MutationProto mutation = 2;</code>
26475      */
26476     public boolean hasMutation() {
26477       return ((bitField0_ & 0x00000002) == 0x00000002);
26478     }
26479     /**
26480      * <code>optional .MutationProto mutation = 2;</code>
26481      */
26482     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
26483       return mutation_;
26484     }
26485     /**
26486      * <code>optional .MutationProto mutation = 2;</code>
26487      */
26488     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
26489       return mutation_;
26490     }
26491 
26492     // optional .Get get = 3;
26493     public static final int GET_FIELD_NUMBER = 3;
26494     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
26495     /**
26496      * <code>optional .Get get = 3;</code>
26497      */
26498     public boolean hasGet() {
26499       return ((bitField0_ & 0x00000004) == 0x00000004);
26500     }
26501     /**
26502      * <code>optional .Get get = 3;</code>
26503      */
26504     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
26505       return get_;
26506     }
26507     /**
26508      * <code>optional .Get get = 3;</code>
26509      */
26510     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
26511       return get_;
26512     }
26513 
26514     // optional .CoprocessorServiceCall service_call = 4;
26515     public static final int SERVICE_CALL_FIELD_NUMBER = 4;
26516     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_;
26517     /**
26518      * <code>optional .CoprocessorServiceCall service_call = 4;</code>
26519      */
26520     public boolean hasServiceCall() {
26521       return ((bitField0_ & 0x00000008) == 0x00000008);
26522     }
26523     /**
26524      * <code>optional .CoprocessorServiceCall service_call = 4;</code>
26525      */
26526     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
26527       return serviceCall_;
26528     }
26529     /**
26530      * <code>optional .CoprocessorServiceCall service_call = 4;</code>
26531      */
26532     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
26533       return serviceCall_;
26534     }
26535 
26536     private void initFields() {
26537       index_ = 0;
26538       mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
26539       get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
26540       serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
26541     }
26542     private byte memoizedIsInitialized = -1;
26543     public final boolean isInitialized() {
26544       byte isInitialized = memoizedIsInitialized;
26545       if (isInitialized != -1) return isInitialized == 1;
26546 
26547       if (hasMutation()) {
26548         if (!getMutation().isInitialized()) {
26549           memoizedIsInitialized = 0;
26550           return false;
26551         }
26552       }
26553       if (hasGet()) {
26554         if (!getGet().isInitialized()) {
26555           memoizedIsInitialized = 0;
26556           return false;
26557         }
26558       }
26559       if (hasServiceCall()) {
26560         if (!getServiceCall().isInitialized()) {
26561           memoizedIsInitialized = 0;
26562           return false;
26563         }
26564       }
26565       memoizedIsInitialized = 1;
26566       return true;
26567     }
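    // Editorial note: Action itself declares no required fields, so
    // isInitialized() only fails when a nested mutation/get/service_call is
    // present but missing its own required fields. A defensive caller might
    // check before sending:
    //
    //   if (!action.isInitialized()) {
    //     throw new IllegalStateException("incomplete Action: " + action);
    //   }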
26568 
26569     public void writeTo(com.google.protobuf.CodedOutputStream output)
26570                         throws java.io.IOException {
26571       getSerializedSize();
26572       if (((bitField0_ & 0x00000001) == 0x00000001)) {
26573         output.writeUInt32(1, index_);
26574       }
26575       if (((bitField0_ & 0x00000002) == 0x00000002)) {
26576         output.writeMessage(2, mutation_);
26577       }
26578       if (((bitField0_ & 0x00000004) == 0x00000004)) {
26579         output.writeMessage(3, get_);
26580       }
26581       if (((bitField0_ & 0x00000008) == 0x00000008)) {
26582         output.writeMessage(4, serviceCall_);
26583       }
26584       getUnknownFields().writeTo(output);
26585     }
26586 
26587     private int memoizedSerializedSize = -1;
26588     public int getSerializedSize() {
26589       int size = memoizedSerializedSize;
26590       if (size != -1) return size;
26591 
26592       size = 0;
26593       if (((bitField0_ & 0x00000001) == 0x00000001)) {
26594         size += com.google.protobuf.CodedOutputStream
26595           .computeUInt32Size(1, index_);
26596       }
26597       if (((bitField0_ & 0x00000002) == 0x00000002)) {
26598         size += com.google.protobuf.CodedOutputStream
26599           .computeMessageSize(2, mutation_);
26600       }
26601       if (((bitField0_ & 0x00000004) == 0x00000004)) {
26602         size += com.google.protobuf.CodedOutputStream
26603           .computeMessageSize(3, get_);
26604       }
26605       if (((bitField0_ & 0x00000008) == 0x00000008)) {
26606         size += com.google.protobuf.CodedOutputStream
26607           .computeMessageSize(4, serviceCall_);
26608       }
26609       size += getUnknownFields().getSerializedSize();
26610       memoizedSerializedSize = size;
26611       return size;
26612     }
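    // Editorial usage sketch: getSerializedSize() is memoized and is the same
    // length that writeDelimitedTo() (inherited from the protobuf runtime)
    // prefixes as a varint, pairing with the parseDelimitedFrom() factories
    // further below. `action` is an assumed Action instance.
    //
    //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    //   action.writeDelimitedTo(out);            // length-prefixed frame
    //   ClientProtos.Action back = ClientProtos.Action.parseDelimitedFrom(
    //       new java.io.ByteArrayInputStream(out.toByteArray()));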
26613 
26614     private static final long serialVersionUID = 0L;
26615     @java.lang.Override
26616     protected java.lang.Object writeReplace()
26617         throws java.io.ObjectStreamException {
26618       return super.writeReplace();
26619     }
26620 
26621     @java.lang.Override
26622     public boolean equals(final java.lang.Object obj) {
26623       if (obj == this) {
26624        return true;
26625       }
26626       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)) {
26627         return super.equals(obj);
26628       }
26629       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) obj;
26630 
26631       boolean result = true;
26632       result = result && (hasIndex() == other.hasIndex());
26633       if (hasIndex()) {
26634         result = result && (getIndex()
26635             == other.getIndex());
26636       }
26637       result = result && (hasMutation() == other.hasMutation());
26638       if (hasMutation()) {
26639         result = result && getMutation()
26640             .equals(other.getMutation());
26641       }
26642       result = result && (hasGet() == other.hasGet());
26643       if (hasGet()) {
26644         result = result && getGet()
26645             .equals(other.getGet());
26646       }
26647       result = result && (hasServiceCall() == other.hasServiceCall());
26648       if (hasServiceCall()) {
26649         result = result && getServiceCall()
26650             .equals(other.getServiceCall());
26651       }
26652       result = result &&
26653           getUnknownFields().equals(other.getUnknownFields());
26654       return result;
26655     }
26656 
26657     private int memoizedHashCode = 0;
26658     @java.lang.Override
26659     public int hashCode() {
26660       if (memoizedHashCode != 0) {
26661         return memoizedHashCode;
26662       }
26663       int hash = 41;
26664       hash = (19 * hash) + getDescriptorForType().hashCode();
26665       if (hasIndex()) {
26666         hash = (37 * hash) + INDEX_FIELD_NUMBER;
26667         hash = (53 * hash) + getIndex();
26668       }
26669       if (hasMutation()) {
26670         hash = (37 * hash) + MUTATION_FIELD_NUMBER;
26671         hash = (53 * hash) + getMutation().hashCode();
26672       }
26673       if (hasGet()) {
26674         hash = (37 * hash) + GET_FIELD_NUMBER;
26675         hash = (53 * hash) + getGet().hashCode();
26676       }
26677       if (hasServiceCall()) {
26678         hash = (37 * hash) + SERVICE_CALL_FIELD_NUMBER;
26679         hash = (53 * hash) + getServiceCall().hashCode();
26680       }
26681       hash = (29 * hash) + getUnknownFields().hashCode();
26682       memoizedHashCode = hash;
26683       return hash;
26684     }
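    // Editorial note: equals() and hashCode() are value-based over the set
    // fields plus unknown fields, so Action instances behave well as keys in
    // hash-based collections, e.g. de-duplicating an assumed List<Action>
    // `actions`:
    //
    //   java.util.Set<ClientProtos.Action> unique =
    //       new java.util.HashSet<ClientProtos.Action>(actions);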
26685 
26686     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
26687         com.google.protobuf.ByteString data)
26688         throws com.google.protobuf.InvalidProtocolBufferException {
26689       return PARSER.parseFrom(data);
26690     }
26691     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
26692         com.google.protobuf.ByteString data,
26693         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26694         throws com.google.protobuf.InvalidProtocolBufferException {
26695       return PARSER.parseFrom(data, extensionRegistry);
26696     }
26697     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(byte[] data)
26698         throws com.google.protobuf.InvalidProtocolBufferException {
26699       return PARSER.parseFrom(data);
26700     }
26701     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
26702         byte[] data,
26703         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26704         throws com.google.protobuf.InvalidProtocolBufferException {
26705       return PARSER.parseFrom(data, extensionRegistry);
26706     }
26707     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(java.io.InputStream input)
26708         throws java.io.IOException {
26709       return PARSER.parseFrom(input);
26710     }
26711     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
26712         java.io.InputStream input,
26713         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26714         throws java.io.IOException {
26715       return PARSER.parseFrom(input, extensionRegistry);
26716     }
26717     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(java.io.InputStream input)
26718         throws java.io.IOException {
26719       return PARSER.parseDelimitedFrom(input);
26720     }
26721     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseDelimitedFrom(
26722         java.io.InputStream input,
26723         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26724         throws java.io.IOException {
26725       return PARSER.parseDelimitedFrom(input, extensionRegistry);
26726     }
26727     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
26728         com.google.protobuf.CodedInputStream input)
26729         throws java.io.IOException {
26730       return PARSER.parseFrom(input);
26731     }
26732     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parseFrom(
26733         com.google.protobuf.CodedInputStream input,
26734         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26735         throws java.io.IOException {
26736       return PARSER.parseFrom(input, extensionRegistry);
26737     }
26738 
26739     public static Builder newBuilder() { return Builder.create(); }
26740     public Builder newBuilderForType() { return newBuilder(); }
26741     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action prototype) {
26742       return newBuilder().mergeFrom(prototype);
26743     }
26744     public Builder toBuilder() { return newBuilder(this); }
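    // Editorial usage sketch: messages are immutable, so toBuilder() is the
    // way to copy-and-modify an existing Action; here only the index used to
    // align multi results is re-set (`original` is an assumed Action).
    //
    //   ClientProtos.Action renumbered = original.toBuilder()
    //       .setIndex(42)
    //       .build();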
26745 
26746     @java.lang.Override
26747     protected Builder newBuilderForType(
26748         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
26749       Builder builder = new Builder(parent);
26750       return builder;
26751     }
26752     /**
26753      * Protobuf type {@code Action}
26754      *
26755      * <pre>
26756      * Either a Get or a Mutation
26757      * </pre>
26758      */
26759     public static final class Builder extends
26760         com.google.protobuf.GeneratedMessage.Builder<Builder>
26761        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder {
26762       public static final com.google.protobuf.Descriptors.Descriptor
26763           getDescriptor() {
26764         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_descriptor;
26765       }
26766 
26767       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
26768           internalGetFieldAccessorTable() {
26769         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_fieldAccessorTable
26770             .ensureFieldAccessorsInitialized(
26771                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder.class);
26772       }
26773 
26774       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.newBuilder()
26775       private Builder() {
26776         maybeForceBuilderInitialization();
26777       }
26778 
26779       private Builder(
26780           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
26781         super(parent);
26782         maybeForceBuilderInitialization();
26783       }
26784       private void maybeForceBuilderInitialization() {
26785         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
26786           getMutationFieldBuilder();
26787           getGetFieldBuilder();
26788           getServiceCallFieldBuilder();
26789         }
26790       }
26791       private static Builder create() {
26792         return new Builder();
26793       }
26794 
26795       public Builder clear() {
26796         super.clear();
26797         index_ = 0;
26798         bitField0_ = (bitField0_ & ~0x00000001);
26799         if (mutationBuilder_ == null) {
26800           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
26801         } else {
26802           mutationBuilder_.clear();
26803         }
26804         bitField0_ = (bitField0_ & ~0x00000002);
26805         if (getBuilder_ == null) {
26806           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
26807         } else {
26808           getBuilder_.clear();
26809         }
26810         bitField0_ = (bitField0_ & ~0x00000004);
26811         if (serviceCallBuilder_ == null) {
26812           serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
26813         } else {
26814           serviceCallBuilder_.clear();
26815         }
26816         bitField0_ = (bitField0_ & ~0x00000008);
26817         return this;
26818       }
26819 
26820       public Builder clone() {
26821         return create().mergeFrom(buildPartial());
26822       }
26823 
26824       public com.google.protobuf.Descriptors.Descriptor
26825           getDescriptorForType() {
26826         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Action_descriptor;
26827       }
26828 
26829       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getDefaultInstanceForType() {
26830         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance();
26831       }
26832 
26833       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action build() {
26834         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = buildPartial();
26835         if (!result.isInitialized()) {
26836           throw newUninitializedMessageException(result);
26837         }
26838         return result;
26839       }
26840 
26841       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action buildPartial() {
26842         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action(this);
26843         int from_bitField0_ = bitField0_;
26844         int to_bitField0_ = 0;
26845         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
26846           to_bitField0_ |= 0x00000001;
26847         }
26848         result.index_ = index_;
26849         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
26850           to_bitField0_ |= 0x00000002;
26851         }
26852         if (mutationBuilder_ == null) {
26853           result.mutation_ = mutation_;
26854         } else {
26855           result.mutation_ = mutationBuilder_.build();
26856         }
26857         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
26858           to_bitField0_ |= 0x00000004;
26859         }
26860         if (getBuilder_ == null) {
26861           result.get_ = get_;
26862         } else {
26863           result.get_ = getBuilder_.build();
26864         }
26865         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
26866           to_bitField0_ |= 0x00000008;
26867         }
26868         if (serviceCallBuilder_ == null) {
26869           result.serviceCall_ = serviceCall_;
26870         } else {
26871           result.serviceCall_ = serviceCallBuilder_.build();
26872         }
26873         result.bitField0_ = to_bitField0_;
26874         onBuilt();
26875         return result;
26876       }
26877 
26878       public Builder mergeFrom(com.google.protobuf.Message other) {
26879         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) {
26880           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action)other);
26881         } else {
26882           super.mergeFrom(other);
26883           return this;
26884         }
26885       }
26886 
26887       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action other) {
26888         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance()) return this;
26889         if (other.hasIndex()) {
26890           setIndex(other.getIndex());
26891         }
26892         if (other.hasMutation()) {
26893           mergeMutation(other.getMutation());
26894         }
26895         if (other.hasGet()) {
26896           mergeGet(other.getGet());
26897         }
26898         if (other.hasServiceCall()) {
26899           mergeServiceCall(other.getServiceCall());
26900         }
26901         this.mergeUnknownFields(other.getUnknownFields());
26902         return this;
26903       }
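      // Editorial note: mergeFrom(Action) follows the usual proto2 rules: set
      // scalar fields of `other` overwrite this builder's values, present
      // sub-messages (mutation/get/service_call) are merged field by field
      // rather than replaced, and unknown fields are concatenated. For example
      // (`base` and `overrides` are assumed Actions):
      //
      //   ClientProtos.Action merged = ClientProtos.Action.newBuilder(base)
      //       .mergeFrom(overrides)
      //       .build();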
26904 
26905       public final boolean isInitialized() {
26906         if (hasMutation()) {
26907           if (!getMutation().isInitialized()) {
26908 
26909             return false;
26910           }
26911         }
26912         if (hasGet()) {
26913           if (!getGet().isInitialized()) {
26914 
26915             return false;
26916           }
26917         }
26918         if (hasServiceCall()) {
26919           if (!getServiceCall().isInitialized()) {
26920 
26921             return false;
26922           }
26923         }
26924         return true;
26925       }
26926 
26927       public Builder mergeFrom(
26928           com.google.protobuf.CodedInputStream input,
26929           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
26930           throws java.io.IOException {
26931         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action parsedMessage = null;
26932         try {
26933           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
26934         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
26935           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action) e.getUnfinishedMessage();
26936           throw e;
26937         } finally {
26938           if (parsedMessage != null) {
26939             mergeFrom(parsedMessage);
26940           }
26941         }
26942         return this;
26943       }
26944       private int bitField0_;
26945 
26946       // optional uint32 index = 1;
26947       private int index_ ;
26948       /**
26949        * <code>optional uint32 index = 1;</code>
26950        *
26951        * <pre>
26952        * If part of a multi action, useful for aligning the
26953        * result with what was originally submitted.
26954        * </pre>
26955        */
26956       public boolean hasIndex() {
26957         return ((bitField0_ & 0x00000001) == 0x00000001);
26958       }
26959       /**
26960        * <code>optional uint32 index = 1;</code>
26961        *
26962        * <pre>
26963        * If part of a multi action, useful for aligning the
26964        * result with what was originally submitted.
26965        * </pre>
26966        */
26967       public int getIndex() {
26968         return index_;
26969       }
26970       /**
26971        * <code>optional uint32 index = 1;</code>
26972        *
26973        * <pre>
26974        * If part of a multi action, useful for aligning the
26975        * result with what was originally submitted.
26976        * </pre>
26977        */
26978       public Builder setIndex(int value) {
26979         bitField0_ |= 0x00000001;
26980         index_ = value;
26981         onChanged();
26982         return this;
26983       }
26984       /**
26985        * <code>optional uint32 index = 1;</code>
26986        *
26987        * <pre>
26988        * If part of a multi action, useful for aligning the
26989        * result with what was originally submitted.
26990        * </pre>
26991        */
26992       public Builder clearIndex() {
26993         bitField0_ = (bitField0_ & ~0x00000001);
26994         index_ = 0;
26995         onChanged();
26996         return this;
26997       }
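      // Editorial usage sketch: when Actions are batched into a multi request,
      // setIndex() lets the caller correlate each response with the action it
      // submitted; `gets` is an assumed java.util.List<ClientProtos.Get>.
      //
      //   java.util.List<ClientProtos.Action> batch =
      //       new java.util.ArrayList<ClientProtos.Action>();
      //   for (int i = 0; i < gets.size(); i++) {
      //     batch.add(ClientProtos.Action.newBuilder()
      //         .setIndex(i)
      //         .setGet(gets.get(i))
      //         .build());
      //   }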
26998 
26999       // optional .MutationProto mutation = 2;
27000       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
27001       private com.google.protobuf.SingleFieldBuilder<
27002           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_;
27003       /**
27004        * <code>optional .MutationProto mutation = 2;</code>
27005        */
27006       public boolean hasMutation() {
27007         return ((bitField0_ & 0x00000002) == 0x00000002);
27008       }
27009       /**
27010        * <code>optional .MutationProto mutation = 2;</code>
27011        */
27012       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() {
27013         if (mutationBuilder_ == null) {
27014           return mutation_;
27015         } else {
27016           return mutationBuilder_.getMessage();
27017         }
27018       }
27019       /**
27020        * <code>optional .MutationProto mutation = 2;</code>
27021        */
27022       public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
27023         if (mutationBuilder_ == null) {
27024           if (value == null) {
27025             throw new NullPointerException();
27026           }
27027           mutation_ = value;
27028           onChanged();
27029         } else {
27030           mutationBuilder_.setMessage(value);
27031         }
27032         bitField0_ |= 0x00000002;
27033         return this;
27034       }
27035       /**
27036        * <code>optional .MutationProto mutation = 2;</code>
27037        */
27038       public Builder setMutation(
27039           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
27040         if (mutationBuilder_ == null) {
27041           mutation_ = builderForValue.build();
27042           onChanged();
27043         } else {
27044           mutationBuilder_.setMessage(builderForValue.build());
27045         }
27046         bitField0_ |= 0x00000002;
27047         return this;
27048       }
27049       /**
27050        * <code>optional .MutationProto mutation = 2;</code>
27051        */
27052       public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
27053         if (mutationBuilder_ == null) {
27054           if (((bitField0_ & 0x00000002) == 0x00000002) &&
27055               mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) {
27056             mutation_ =
27057               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial();
27058           } else {
27059             mutation_ = value;
27060           }
27061           onChanged();
27062         } else {
27063           mutationBuilder_.mergeFrom(value);
27064         }
27065         bitField0_ |= 0x00000002;
27066         return this;
27067       }
27068       /**
27069        * <code>optional .MutationProto mutation = 2;</code>
27070        */
27071       public Builder clearMutation() {
27072         if (mutationBuilder_ == null) {
27073           mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance();
27074           onChanged();
27075         } else {
27076           mutationBuilder_.clear();
27077         }
27078         bitField0_ = (bitField0_ & ~0x00000002);
27079         return this;
27080       }
27081       /**
27082        * <code>optional .MutationProto mutation = 2;</code>
27083        */
27084       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() {
27085         bitField0_ |= 0x00000002;
27086         onChanged();
27087         return getMutationFieldBuilder().getBuilder();
27088       }
27089       /**
27090        * <code>optional .MutationProto mutation = 2;</code>
27091        */
27092       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() {
27093         if (mutationBuilder_ != null) {
27094           return mutationBuilder_.getMessageOrBuilder();
27095         } else {
27096           return mutation_;
27097         }
27098       }
27099       /**
27100        * <code>optional .MutationProto mutation = 2;</code>
27101        */
27102       private com.google.protobuf.SingleFieldBuilder<
27103           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
27104           getMutationFieldBuilder() {
27105         if (mutationBuilder_ == null) {
27106           mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder<
27107               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>(
27108                   mutation_,
27109                   getParentForChildren(),
27110                   isClean());
27111           mutation_ = null;
27112         }
27113         return mutationBuilder_;
27114       }
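      // Editorial usage sketch: getMutationBuilder() exposes the nested
      // builder for in-place editing, as an alternative to building a
      // MutationProto separately and calling setMutation(); the call also
      // marks the `mutation` field as set. MutationProto's own fields are not
      // shown in this excerpt, so only the access pattern is illustrated.
      //
      //   ClientProtos.Action.Builder ab = ClientProtos.Action.newBuilder();
      //   ClientProtos.MutationProto.Builder mb = ab.getMutationBuilder();
      //   // ... populate mb, then ...
      //   ClientProtos.Action action = ab.build();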
27115 
27116       // optional .Get get = 3;
27117       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
27118       private com.google.protobuf.SingleFieldBuilder<
27119           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
27120       /**
27121        * <code>optional .Get get = 3;</code>
27122        */
27123       public boolean hasGet() {
27124         return ((bitField0_ & 0x00000004) == 0x00000004);
27125       }
27126       /**
27127        * <code>optional .Get get = 3;</code>
27128        */
27129       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
27130         if (getBuilder_ == null) {
27131           return get_;
27132         } else {
27133           return getBuilder_.getMessage();
27134         }
27135       }
27136       /**
27137        * <code>optional .Get get = 3;</code>
27138        */
27139       public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
27140         if (getBuilder_ == null) {
27141           if (value == null) {
27142             throw new NullPointerException();
27143           }
27144           get_ = value;
27145           onChanged();
27146         } else {
27147           getBuilder_.setMessage(value);
27148         }
27149         bitField0_ |= 0x00000004;
27150         return this;
27151       }
27152       /**
27153        * <code>optional .Get get = 3;</code>
27154        */
27155       public Builder setGet(
27156           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
27157         if (getBuilder_ == null) {
27158           get_ = builderForValue.build();
27159           onChanged();
27160         } else {
27161           getBuilder_.setMessage(builderForValue.build());
27162         }
27163         bitField0_ |= 0x00000004;
27164         return this;
27165       }
27166       /**
27167        * <code>optional .Get get = 3;</code>
27168        */
27169       public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
27170         if (getBuilder_ == null) {
27171           if (((bitField0_ & 0x00000004) == 0x00000004) &&
27172               get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
27173             get_ =
27174               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
27175           } else {
27176             get_ = value;
27177           }
27178           onChanged();
27179         } else {
27180           getBuilder_.mergeFrom(value);
27181         }
27182         bitField0_ |= 0x00000004;
27183         return this;
27184       }
27185       /**
27186        * <code>optional .Get get = 3;</code>
27187        */
27188       public Builder clearGet() {
27189         if (getBuilder_ == null) {
27190           get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
27191           onChanged();
27192         } else {
27193           getBuilder_.clear();
27194         }
27195         bitField0_ = (bitField0_ & ~0x00000004);
27196         return this;
27197       }
27198       /**
27199        * <code>optional .Get get = 3;</code>
27200        */
27201       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
27202         bitField0_ |= 0x00000004;
27203         onChanged();
27204         return getGetFieldBuilder().getBuilder();
27205       }
27206       /**
27207        * <code>optional .Get get = 3;</code>
27208        */
27209       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
27210         if (getBuilder_ != null) {
27211           return getBuilder_.getMessageOrBuilder();
27212         } else {
27213           return get_;
27214         }
27215       }
27216       /**
27217        * <code>optional .Get get = 3;</code>
27218        */
27219       private com.google.protobuf.SingleFieldBuilder<
27220           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>
27221           getGetFieldBuilder() {
27222         if (getBuilder_ == null) {
27223           getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
27224               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
27225                   get_,
27226                   getParentForChildren(),
27227                   isClean());
27228           get_ = null;
27229         }
27230         return getBuilder_;
27231       }
27232 
27233       // optional .CoprocessorServiceCall service_call = 4;
27234       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
27235       private com.google.protobuf.SingleFieldBuilder<
27236           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder> serviceCallBuilder_;
27237       /**
27238        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27239        */
27240       public boolean hasServiceCall() {
27241         return ((bitField0_ & 0x00000008) == 0x00000008);
27242       }
27243       /**
27244        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27245        */
27246       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall getServiceCall() {
27247         if (serviceCallBuilder_ == null) {
27248           return serviceCall_;
27249         } else {
27250           return serviceCallBuilder_.getMessage();
27251         }
27252       }
27253       /**
27254        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27255        */
27256       public Builder setServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
27257         if (serviceCallBuilder_ == null) {
27258           if (value == null) {
27259             throw new NullPointerException();
27260           }
27261           serviceCall_ = value;
27262           onChanged();
27263         } else {
27264           serviceCallBuilder_.setMessage(value);
27265         }
27266         bitField0_ |= 0x00000008;
27267         return this;
27268       }
27269       /**
27270        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27271        */
27272       public Builder setServiceCall(
27273           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder builderForValue) {
27274         if (serviceCallBuilder_ == null) {
27275           serviceCall_ = builderForValue.build();
27276           onChanged();
27277         } else {
27278           serviceCallBuilder_.setMessage(builderForValue.build());
27279         }
27280         bitField0_ |= 0x00000008;
27281         return this;
27282       }
27283       /**
27284        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27285        */
27286       public Builder mergeServiceCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall value) {
27287         if (serviceCallBuilder_ == null) {
27288           if (((bitField0_ & 0x00000008) == 0x00000008) &&
27289               serviceCall_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance()) {
27290             serviceCall_ =
27291               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.newBuilder(serviceCall_).mergeFrom(value).buildPartial();
27292           } else {
27293             serviceCall_ = value;
27294           }
27295           onChanged();
27296         } else {
27297           serviceCallBuilder_.mergeFrom(value);
27298         }
27299         bitField0_ |= 0x00000008;
27300         return this;
27301       }
27302       /**
27303        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27304        */
27305       public Builder clearServiceCall() {
27306         if (serviceCallBuilder_ == null) {
27307           serviceCall_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.getDefaultInstance();
27308           onChanged();
27309         } else {
27310           serviceCallBuilder_.clear();
27311         }
27312         bitField0_ = (bitField0_ & ~0x00000008);
27313         return this;
27314       }
27315       /**
27316        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27317        */
27318       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder getServiceCallBuilder() {
27319         bitField0_ |= 0x00000008;
27320         onChanged();
27321         return getServiceCallFieldBuilder().getBuilder();
27322       }
27323       /**
27324        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27325        */
27326       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder getServiceCallOrBuilder() {
27327         if (serviceCallBuilder_ != null) {
27328           return serviceCallBuilder_.getMessageOrBuilder();
27329         } else {
27330           return serviceCall_;
27331         }
27332       }
27333       /**
27334        * <code>optional .CoprocessorServiceCall service_call = 4;</code>
27335        */
27336       private com.google.protobuf.SingleFieldBuilder<
27337           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>
27338           getServiceCallFieldBuilder() {
27339         if (serviceCallBuilder_ == null) {
27340           serviceCallBuilder_ = new com.google.protobuf.SingleFieldBuilder<
27341               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCallOrBuilder>(
27342                   serviceCall_,
27343                   getParentForChildren(),
27344                   isClean());
27345           serviceCall_ = null;
27346         }
27347         return serviceCallBuilder_;
27348       }
27349 
27350       // @@protoc_insertion_point(builder_scope:Action)
27351     }
27352 
27353     static {
27354       defaultInstance = new Action(true);
27355       defaultInstance.initFields();
27356     }
27357 
27358     // @@protoc_insertion_point(class_scope:Action)
27359   }
27360 
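  // Illustrative sketch, not part of the generated file: one way a caller might use the
  // Action builder defined above to wrap a single Get. The helper name exampleGetAction is
  // an assumption for illustration; only methods generated in this class (newBuilder, setGet,
  // build) are used.
  private static Action exampleGetAction(Get get) {
    return Action.newBuilder()
        .setGet(get)   // optional .Get get = 3
        .build();
  }
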
27361   public interface RegionActionOrBuilder
27362       extends com.google.protobuf.MessageOrBuilder {
27363 
27364     // required .RegionSpecifier region = 1;
27365     /**
27366      * <code>required .RegionSpecifier region = 1;</code>
27367      */
27368     boolean hasRegion();
27369     /**
27370      * <code>required .RegionSpecifier region = 1;</code>
27371      */
27372     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
27373     /**
27374      * <code>required .RegionSpecifier region = 1;</code>
27375      */
27376     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
27377 
27378     // optional bool atomic = 2;
27379     /**
27380      * <code>optional bool atomic = 2;</code>
27381      *
27382      * <pre>
27383      * When set, run mutations as atomic unit.
27384      * </pre>
27385      */
27386     boolean hasAtomic();
27387     /**
27388      * <code>optional bool atomic = 2;</code>
27389      *
27390      * <pre>
27391      * When set, run mutations as atomic unit.
27392      * </pre>
27393      */
27394     boolean getAtomic();
27395 
27396     // repeated .Action action = 3;
27397     /**
27398      * <code>repeated .Action action = 3;</code>
27399      */
27400     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>
27401         getActionList();
27402     /**
27403      * <code>repeated .Action action = 3;</code>
27404      */
27405     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index);
27406     /**
27407      * <code>repeated .Action action = 3;</code>
27408      */
27409     int getActionCount();
27410     /**
27411      * <code>repeated .Action action = 3;</code>
27412      */
27413     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>
27414         getActionOrBuilderList();
27415     /**
27416      * <code>repeated .Action action = 3;</code>
27417      */
27418     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
27419         int index);
27420   }
27421   /**
27422    * Protobuf type {@code RegionAction}
27423    *
27424    * <pre>
27425    **
27426    * Actions to run against a Region.
27427    * </pre>
27428    */
27429   public static final class RegionAction extends
27430       com.google.protobuf.GeneratedMessage
27431       implements RegionActionOrBuilder {
27432     // Use RegionAction.newBuilder() to construct.
27433     private RegionAction(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
27434       super(builder);
27435       this.unknownFields = builder.getUnknownFields();
27436     }
27437     private RegionAction(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
27438 
27439     private static final RegionAction defaultInstance;
27440     public static RegionAction getDefaultInstance() {
27441       return defaultInstance;
27442     }
27443 
27444     public RegionAction getDefaultInstanceForType() {
27445       return defaultInstance;
27446     }
27447 
27448     private final com.google.protobuf.UnknownFieldSet unknownFields;
27449     @java.lang.Override
27450     public final com.google.protobuf.UnknownFieldSet
27451         getUnknownFields() {
27452       return this.unknownFields;
27453     }
27454     private RegionAction(
27455         com.google.protobuf.CodedInputStream input,
27456         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27457         throws com.google.protobuf.InvalidProtocolBufferException {
27458       initFields();
27459       int mutable_bitField0_ = 0;
27460       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
27461           com.google.protobuf.UnknownFieldSet.newBuilder();
27462       try {
27463         boolean done = false;
27464         while (!done) {
27465           int tag = input.readTag();
27466           switch (tag) {
27467             case 0:
27468               done = true;
27469               break;
27470             default: {
27471               if (!parseUnknownField(input, unknownFields,
27472                                      extensionRegistry, tag)) {
27473                 done = true;
27474               }
27475               break;
27476             }
27477             case 10: {
27478               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
27479               if (((bitField0_ & 0x00000001) == 0x00000001)) {
27480                 subBuilder = region_.toBuilder();
27481               }
27482               region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
27483               if (subBuilder != null) {
27484                 subBuilder.mergeFrom(region_);
27485                 region_ = subBuilder.buildPartial();
27486               }
27487               bitField0_ |= 0x00000001;
27488               break;
27489             }
27490             case 16: {
27491               bitField0_ |= 0x00000002;
27492               atomic_ = input.readBool();
27493               break;
27494             }
27495             case 26: {
27496               if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
27497                 action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>();
27498                 mutable_bitField0_ |= 0x00000004;
27499               }
27500               action_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.PARSER, extensionRegistry));
27501               break;
27502             }
27503           }
27504         }
27505       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
27506         throw e.setUnfinishedMessage(this);
27507       } catch (java.io.IOException e) {
27508         throw new com.google.protobuf.InvalidProtocolBufferException(
27509             e.getMessage()).setUnfinishedMessage(this);
27510       } finally {
27511         if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
27512           action_ = java.util.Collections.unmodifiableList(action_);
27513         }
27514         this.unknownFields = unknownFields.build();
27515         makeExtensionsImmutable();
27516       }
27517     }
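    // Added note (not generated): the case labels in the constructor above are protobuf wire
    // tags, computed as (field_number << 3) | wire_type:
    //   region (field 1, length-delimited, wire type 2): (1 << 3) | 2 = 10
    //   atomic (field 2, varint,           wire type 0): (2 << 3) | 0 = 16
    //   action (field 3, length-delimited, wire type 2): (3 << 3) | 2 = 26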
27518     public static final com.google.protobuf.Descriptors.Descriptor
27519         getDescriptor() {
27520       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_descriptor;
27521     }
27522 
27523     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
27524         internalGetFieldAccessorTable() {
27525       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_fieldAccessorTable
27526           .ensureFieldAccessorsInitialized(
27527               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
27528     }
27529 
27530     public static com.google.protobuf.Parser<RegionAction> PARSER =
27531         new com.google.protobuf.AbstractParser<RegionAction>() {
27532       public RegionAction parsePartialFrom(
27533           com.google.protobuf.CodedInputStream input,
27534           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27535           throws com.google.protobuf.InvalidProtocolBufferException {
27536         return new RegionAction(input, extensionRegistry);
27537       }
27538     };
27539 
27540     @java.lang.Override
27541     public com.google.protobuf.Parser<RegionAction> getParserForType() {
27542       return PARSER;
27543     }
27544 
27545     private int bitField0_;
27546     // required .RegionSpecifier region = 1;
27547     public static final int REGION_FIELD_NUMBER = 1;
27548     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
27549     /**
27550      * <code>required .RegionSpecifier region = 1;</code>
27551      */
27552     public boolean hasRegion() {
27553       return ((bitField0_ & 0x00000001) == 0x00000001);
27554     }
27555     /**
27556      * <code>required .RegionSpecifier region = 1;</code>
27557      */
27558     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
27559       return region_;
27560     }
27561     /**
27562      * <code>required .RegionSpecifier region = 1;</code>
27563      */
27564     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
27565       return region_;
27566     }
27567 
27568     // optional bool atomic = 2;
27569     public static final int ATOMIC_FIELD_NUMBER = 2;
27570     private boolean atomic_;
27571     /**
27572      * <code>optional bool atomic = 2;</code>
27573      *
27574      * <pre>
27575      * When set, run mutations as atomic unit.
27576      * </pre>
27577      */
27578     public boolean hasAtomic() {
27579       return ((bitField0_ & 0x00000002) == 0x00000002);
27580     }
27581     /**
27582      * <code>optional bool atomic = 2;</code>
27583      *
27584      * <pre>
27585      * When set, run mutations as atomic unit.
27586      * </pre>
27587      */
27588     public boolean getAtomic() {
27589       return atomic_;
27590     }
27591 
27592     // repeated .Action action = 3;
27593     public static final int ACTION_FIELD_NUMBER = 3;
27594     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_;
27595     /**
27596      * <code>repeated .Action action = 3;</code>
27597      */
27598     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
27599       return action_;
27600     }
27601     /**
27602      * <code>repeated .Action action = 3;</code>
27603      */
27604     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>
27605         getActionOrBuilderList() {
27606       return action_;
27607     }
27608     /**
27609      * <code>repeated .Action action = 3;</code>
27610      */
27611     public int getActionCount() {
27612       return action_.size();
27613     }
27614     /**
27615      * <code>repeated .Action action = 3;</code>
27616      */
27617     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
27618       return action_.get(index);
27619     }
27620     /**
27621      * <code>repeated .Action action = 3;</code>
27622      */
27623     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
27624         int index) {
27625       return action_.get(index);
27626     }
27627 
27628     private void initFields() {
27629       region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
27630       atomic_ = false;
27631       action_ = java.util.Collections.emptyList();
27632     }
27633     private byte memoizedIsInitialized = -1;
27634     public final boolean isInitialized() {
27635       byte isInitialized = memoizedIsInitialized;
27636       if (isInitialized != -1) return isInitialized == 1;
27637 
27638       if (!hasRegion()) {
27639         memoizedIsInitialized = 0;
27640         return false;
27641       }
27642       if (!getRegion().isInitialized()) {
27643         memoizedIsInitialized = 0;
27644         return false;
27645       }
27646       for (int i = 0; i < getActionCount(); i++) {
27647         if (!getAction(i).isInitialized()) {
27648           memoizedIsInitialized = 0;
27649           return false;
27650         }
27651       }
27652       memoizedIsInitialized = 1;
27653       return true;
27654     }
27655 
27656     public void writeTo(com.google.protobuf.CodedOutputStream output)
27657                         throws java.io.IOException {
27658       getSerializedSize();
27659       if (((bitField0_ & 0x00000001) == 0x00000001)) {
27660         output.writeMessage(1, region_);
27661       }
27662       if (((bitField0_ & 0x00000002) == 0x00000002)) {
27663         output.writeBool(2, atomic_);
27664       }
27665       for (int i = 0; i < action_.size(); i++) {
27666         output.writeMessage(3, action_.get(i));
27667       }
27668       getUnknownFields().writeTo(output);
27669     }
27670 
27671     private int memoizedSerializedSize = -1;
27672     public int getSerializedSize() {
27673       int size = memoizedSerializedSize;
27674       if (size != -1) return size;
27675 
27676       size = 0;
27677       if (((bitField0_ & 0x00000001) == 0x00000001)) {
27678         size += com.google.protobuf.CodedOutputStream
27679           .computeMessageSize(1, region_);
27680       }
27681       if (((bitField0_ & 0x00000002) == 0x00000002)) {
27682         size += com.google.protobuf.CodedOutputStream
27683           .computeBoolSize(2, atomic_);
27684       }
27685       for (int i = 0; i < action_.size(); i++) {
27686         size += com.google.protobuf.CodedOutputStream
27687           .computeMessageSize(3, action_.get(i));
27688       }
27689       size += getUnknownFields().getSerializedSize();
27690       memoizedSerializedSize = size;
27691       return size;
27692     }
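    // Added worked example (not generated): with only `atomic` set, the size computed above is
    // computeBoolSize(2, atomic_) = 1 byte for the tag (the varint 16) + 1 byte for the bool
    // value = 2 bytes, plus the size of any unknown fields. Message fields contribute their
    // tag, a length varint, and the nested message's own serialized size.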
27693 
27694     private static final long serialVersionUID = 0L;
27695     @java.lang.Override
27696     protected java.lang.Object writeReplace()
27697         throws java.io.ObjectStreamException {
27698       return super.writeReplace();
27699     }
27700 
27701     @java.lang.Override
27702     public boolean equals(final java.lang.Object obj) {
27703       if (obj == this) {
27704        return true;
27705       }
27706       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)) {
27707         return super.equals(obj);
27708       }
27709       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) obj;
27710 
27711       boolean result = true;
27712       result = result && (hasRegion() == other.hasRegion());
27713       if (hasRegion()) {
27714         result = result && getRegion()
27715             .equals(other.getRegion());
27716       }
27717       result = result && (hasAtomic() == other.hasAtomic());
27718       if (hasAtomic()) {
27719         result = result && (getAtomic()
27720             == other.getAtomic());
27721       }
27722       result = result && getActionList()
27723           .equals(other.getActionList());
27724       result = result &&
27725           getUnknownFields().equals(other.getUnknownFields());
27726       return result;
27727     }
27728 
27729     private int memoizedHashCode = 0;
27730     @java.lang.Override
27731     public int hashCode() {
27732       if (memoizedHashCode != 0) {
27733         return memoizedHashCode;
27734       }
27735       int hash = 41;
27736       hash = (19 * hash) + getDescriptorForType().hashCode();
27737       if (hasRegion()) {
27738         hash = (37 * hash) + REGION_FIELD_NUMBER;
27739         hash = (53 * hash) + getRegion().hashCode();
27740       }
27741       if (hasAtomic()) {
27742         hash = (37 * hash) + ATOMIC_FIELD_NUMBER;
27743         hash = (53 * hash) + hashBoolean(getAtomic());
27744       }
27745       if (getActionCount() > 0) {
27746         hash = (37 * hash) + ACTION_FIELD_NUMBER;
27747         hash = (53 * hash) + getActionList().hashCode();
27748       }
27749       hash = (29 * hash) + getUnknownFields().hashCode();
27750       memoizedHashCode = hash;
27751       return hash;
27752     }
27753 
27754     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
27755         com.google.protobuf.ByteString data)
27756         throws com.google.protobuf.InvalidProtocolBufferException {
27757       return PARSER.parseFrom(data);
27758     }
27759     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
27760         com.google.protobuf.ByteString data,
27761         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27762         throws com.google.protobuf.InvalidProtocolBufferException {
27763       return PARSER.parseFrom(data, extensionRegistry);
27764     }
27765     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(byte[] data)
27766         throws com.google.protobuf.InvalidProtocolBufferException {
27767       return PARSER.parseFrom(data);
27768     }
27769     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
27770         byte[] data,
27771         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27772         throws com.google.protobuf.InvalidProtocolBufferException {
27773       return PARSER.parseFrom(data, extensionRegistry);
27774     }
27775     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(java.io.InputStream input)
27776         throws java.io.IOException {
27777       return PARSER.parseFrom(input);
27778     }
27779     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
27780         java.io.InputStream input,
27781         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27782         throws java.io.IOException {
27783       return PARSER.parseFrom(input, extensionRegistry);
27784     }
27785     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(java.io.InputStream input)
27786         throws java.io.IOException {
27787       return PARSER.parseDelimitedFrom(input);
27788     }
27789     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseDelimitedFrom(
27790         java.io.InputStream input,
27791         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27792         throws java.io.IOException {
27793       return PARSER.parseDelimitedFrom(input, extensionRegistry);
27794     }
27795     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
27796         com.google.protobuf.CodedInputStream input)
27797         throws java.io.IOException {
27798       return PARSER.parseFrom(input);
27799     }
27800     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parseFrom(
27801         com.google.protobuf.CodedInputStream input,
27802         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
27803         throws java.io.IOException {
27804       return PARSER.parseFrom(input, extensionRegistry);
27805     }
27806 
27807     public static Builder newBuilder() { return Builder.create(); }
27808     public Builder newBuilderForType() { return newBuilder(); }
27809     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction prototype) {
27810       return newBuilder().mergeFrom(prototype);
27811     }
27812     public Builder toBuilder() { return newBuilder(this); }
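
    // Usage sketch, not generated code: round-trip a RegionAction through its wire form using
    // toByteArray() (driven by writeTo()/getSerializedSize() above) and the parseFrom(byte[])
    // overload defined earlier. The helper name exampleRoundTrip is hypothetical.
    private static RegionAction exampleRoundTrip(RegionAction original)
        throws com.google.protobuf.InvalidProtocolBufferException {
      byte[] wire = original.toByteArray();  // serialize via the generated writeTo()
      return parseFrom(wire);                // delegates to PARSER.parseFrom(data)
    }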
27813 
27814     @java.lang.Override
27815     protected Builder newBuilderForType(
27816         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
27817       Builder builder = new Builder(parent);
27818       return builder;
27819     }
27820     /**
27821      * Protobuf type {@code RegionAction}
27822      *
27823      * <pre>
27824      **
27825      * Actions to run against a Region.
27826      * </pre>
27827      */
27828     public static final class Builder extends
27829         com.google.protobuf.GeneratedMessage.Builder<Builder>
27830        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder {
27831       public static final com.google.protobuf.Descriptors.Descriptor
27832           getDescriptor() {
27833         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_descriptor;
27834       }
27835 
27836       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
27837           internalGetFieldAccessorTable() {
27838         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_fieldAccessorTable
27839             .ensureFieldAccessorsInitialized(
27840                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder.class);
27841       }
27842 
27843       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.newBuilder()
27844       private Builder() {
27845         maybeForceBuilderInitialization();
27846       }
27847 
27848       private Builder(
27849           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
27850         super(parent);
27851         maybeForceBuilderInitialization();
27852       }
27853       private void maybeForceBuilderInitialization() {
27854         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
27855           getRegionFieldBuilder();
27856           getActionFieldBuilder();
27857         }
27858       }
27859       private static Builder create() {
27860         return new Builder();
27861       }
27862 
27863       public Builder clear() {
27864         super.clear();
27865         if (regionBuilder_ == null) {
27866           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
27867         } else {
27868           regionBuilder_.clear();
27869         }
27870         bitField0_ = (bitField0_ & ~0x00000001);
27871         atomic_ = false;
27872         bitField0_ = (bitField0_ & ~0x00000002);
27873         if (actionBuilder_ == null) {
27874           action_ = java.util.Collections.emptyList();
27875           bitField0_ = (bitField0_ & ~0x00000004);
27876         } else {
27877           actionBuilder_.clear();
27878         }
27879         return this;
27880       }
27881 
27882       public Builder clone() {
27883         return create().mergeFrom(buildPartial());
27884       }
27885 
27886       public com.google.protobuf.Descriptors.Descriptor
27887           getDescriptorForType() {
27888         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionAction_descriptor;
27889       }
27890 
27891       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getDefaultInstanceForType() {
27892         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance();
27893       }
27894 
27895       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction build() {
27896         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = buildPartial();
27897         if (!result.isInitialized()) {
27898           throw newUninitializedMessageException(result);
27899         }
27900         return result;
27901       }
27902 
27903       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction buildPartial() {
27904         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction(this);
27905         int from_bitField0_ = bitField0_;
27906         int to_bitField0_ = 0;
27907         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
27908           to_bitField0_ |= 0x00000001;
27909         }
27910         if (regionBuilder_ == null) {
27911           result.region_ = region_;
27912         } else {
27913           result.region_ = regionBuilder_.build();
27914         }
27915         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
27916           to_bitField0_ |= 0x00000002;
27917         }
27918         result.atomic_ = atomic_;
27919         if (actionBuilder_ == null) {
27920           if (((bitField0_ & 0x00000004) == 0x00000004)) {
27921             action_ = java.util.Collections.unmodifiableList(action_);
27922             bitField0_ = (bitField0_ & ~0x00000004);
27923           }
27924           result.action_ = action_;
27925         } else {
27926           result.action_ = actionBuilder_.build();
27927         }
27928         result.bitField0_ = to_bitField0_;
27929         onBuilt();
27930         return result;
27931       }
27932 
27933       public Builder mergeFrom(com.google.protobuf.Message other) {
27934         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) {
27935           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction)other);
27936         } else {
27937           super.mergeFrom(other);
27938           return this;
27939         }
27940       }
27941 
27942       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction other) {
27943         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance()) return this;
27944         if (other.hasRegion()) {
27945           mergeRegion(other.getRegion());
27946         }
27947         if (other.hasAtomic()) {
27948           setAtomic(other.getAtomic());
27949         }
27950         if (actionBuilder_ == null) {
27951           if (!other.action_.isEmpty()) {
27952             if (action_.isEmpty()) {
27953               action_ = other.action_;
27954               bitField0_ = (bitField0_ & ~0x00000004);
27955             } else {
27956               ensureActionIsMutable();
27957               action_.addAll(other.action_);
27958             }
27959             onChanged();
27960           }
27961         } else {
27962           if (!other.action_.isEmpty()) {
27963             if (actionBuilder_.isEmpty()) {
27964               actionBuilder_.dispose();
27965               actionBuilder_ = null;
27966               action_ = other.action_;
27967               bitField0_ = (bitField0_ & ~0x00000004);
27968               actionBuilder_ =
27969                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
27970                    getActionFieldBuilder() : null;
27971             } else {
27972               actionBuilder_.addAllMessages(other.action_);
27973             }
27974           }
27975         }
27976         this.mergeUnknownFields(other.getUnknownFields());
27977         return this;
27978       }
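
      // Added note on merge semantics (not generated): mergeFrom(RegionAction) merges the
      // singular `region` message recursively via mergeRegion(), copies `atomic` when it is
      // present on `other`, and appends `other`'s repeated `action` entries after the existing
      // ones. For hypothetical builders b1 and b2 that each added one Action:
      //   b1.mergeFrom(b2.buildPartial());  // b1's action list now holds both entries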
27979 
27980       public final boolean isInitialized() {
27981         if (!hasRegion()) {
27982 
27983           return false;
27984         }
27985         if (!getRegion().isInitialized()) {
27986 
27987           return false;
27988         }
27989         for (int i = 0; i < getActionCount(); i++) {
27990           if (!getAction(i).isInitialized()) {
27991 
27992             return false;
27993           }
27994         }
27995         return true;
27996       }
27997 
27998       public Builder mergeFrom(
27999           com.google.protobuf.CodedInputStream input,
28000           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28001           throws java.io.IOException {
28002         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction parsedMessage = null;
28003         try {
28004           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
28005         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28006           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction) e.getUnfinishedMessage();
28007           throw e;
28008         } finally {
28009           if (parsedMessage != null) {
28010             mergeFrom(parsedMessage);
28011           }
28012         }
28013         return this;
28014       }
28015       private int bitField0_;
28016 
28017       // required .RegionSpecifier region = 1;
28018       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
28019       private com.google.protobuf.SingleFieldBuilder<
28020           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
28021       /**
28022        * <code>required .RegionSpecifier region = 1;</code>
28023        */
28024       public boolean hasRegion() {
28025         return ((bitField0_ & 0x00000001) == 0x00000001);
28026       }
28027       /**
28028        * <code>required .RegionSpecifier region = 1;</code>
28029        */
28030       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
28031         if (regionBuilder_ == null) {
28032           return region_;
28033         } else {
28034           return regionBuilder_.getMessage();
28035         }
28036       }
28037       /**
28038        * <code>required .RegionSpecifier region = 1;</code>
28039        */
28040       public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
28041         if (regionBuilder_ == null) {
28042           if (value == null) {
28043             throw new NullPointerException();
28044           }
28045           region_ = value;
28046           onChanged();
28047         } else {
28048           regionBuilder_.setMessage(value);
28049         }
28050         bitField0_ |= 0x00000001;
28051         return this;
28052       }
28053       /**
28054        * <code>required .RegionSpecifier region = 1;</code>
28055        */
28056       public Builder setRegion(
28057           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
28058         if (regionBuilder_ == null) {
28059           region_ = builderForValue.build();
28060           onChanged();
28061         } else {
28062           regionBuilder_.setMessage(builderForValue.build());
28063         }
28064         bitField0_ |= 0x00000001;
28065         return this;
28066       }
28067       /**
28068        * <code>required .RegionSpecifier region = 1;</code>
28069        */
28070       public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
28071         if (regionBuilder_ == null) {
28072           if (((bitField0_ & 0x00000001) == 0x00000001) &&
28073               region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
28074             region_ =
28075               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
28076           } else {
28077             region_ = value;
28078           }
28079           onChanged();
28080         } else {
28081           regionBuilder_.mergeFrom(value);
28082         }
28083         bitField0_ |= 0x00000001;
28084         return this;
28085       }
28086       /**
28087        * <code>required .RegionSpecifier region = 1;</code>
28088        */
28089       public Builder clearRegion() {
28090         if (regionBuilder_ == null) {
28091           region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
28092           onChanged();
28093         } else {
28094           regionBuilder_.clear();
28095         }
28096         bitField0_ = (bitField0_ & ~0x00000001);
28097         return this;
28098       }
28099       /**
28100        * <code>required .RegionSpecifier region = 1;</code>
28101        */
28102       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
28103         bitField0_ |= 0x00000001;
28104         onChanged();
28105         return getRegionFieldBuilder().getBuilder();
28106       }
28107       /**
28108        * <code>required .RegionSpecifier region = 1;</code>
28109        */
28110       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
28111         if (regionBuilder_ != null) {
28112           return regionBuilder_.getMessageOrBuilder();
28113         } else {
28114           return region_;
28115         }
28116       }
28117       /**
28118        * <code>required .RegionSpecifier region = 1;</code>
28119        */
28120       private com.google.protobuf.SingleFieldBuilder<
28121           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
28122           getRegionFieldBuilder() {
28123         if (regionBuilder_ == null) {
28124           regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
28125               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
28126                   region_,
28127                   getParentForChildren(),
28128                   isClean());
28129           region_ = null;
28130         }
28131         return regionBuilder_;
28132       }
28133 
28134       // optional bool atomic = 2;
28135       private boolean atomic_ ;
28136       /**
28137        * <code>optional bool atomic = 2;</code>
28138        *
28139        * <pre>
28140        * When set, run mutations as atomic unit.
28141        * </pre>
28142        */
28143       public boolean hasAtomic() {
28144         return ((bitField0_ & 0x00000002) == 0x00000002);
28145       }
28146       /**
28147        * <code>optional bool atomic = 2;</code>
28148        *
28149        * <pre>
28150        * When set, run mutations as atomic unit.
28151        * </pre>
28152        */
28153       public boolean getAtomic() {
28154         return atomic_;
28155       }
28156       /**
28157        * <code>optional bool atomic = 2;</code>
28158        *
28159        * <pre>
28160        * When set, run mutations as atomic unit.
28161        * </pre>
28162        */
28163       public Builder setAtomic(boolean value) {
28164         bitField0_ |= 0x00000002;
28165         atomic_ = value;
28166         onChanged();
28167         return this;
28168       }
28169       /**
28170        * <code>optional bool atomic = 2;</code>
28171        *
28172        * <pre>
28173        * When set, run mutations as atomic unit.
28174        * </pre>
28175        */
28176       public Builder clearAtomic() {
28177         bitField0_ = (bitField0_ & ~0x00000002);
28178         atomic_ = false;
28179         onChanged();
28180         return this;
28181       }
28182 
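      // Usage sketch (added, hypothetical): request that every mutation in this RegionAction be
      // applied as one atomic unit, using the builder methods in this class plus
      // Action.Builder.setMutation defined earlier in this file; regionSpecifier and mutation
      // stand for values obtained elsewhere.
      //   RegionAction.Builder b = RegionAction.newBuilder()
      //       .setRegion(regionSpecifier)                             // required field 1
      //       .setAtomic(true)                                        // optional field 2
      //       .addAction(Action.newBuilder().setMutation(mutation));  // repeated field 3
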
28183       // repeated .Action action = 3;
28184       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> action_ =
28185         java.util.Collections.emptyList();
28186       private void ensureActionIsMutable() {
28187         if (!((bitField0_ & 0x00000004) == 0x00000004)) {
28188           action_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action>(action_);
28189           bitField0_ |= 0x00000004;
28190          }
28191       }
28192 
28193       private com.google.protobuf.RepeatedFieldBuilder<
28194           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder> actionBuilder_;
28195 
28196       /**
28197        * <code>repeated .Action action = 3;</code>
28198        */
28199       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> getActionList() {
28200         if (actionBuilder_ == null) {
28201           return java.util.Collections.unmodifiableList(action_);
28202         } else {
28203           return actionBuilder_.getMessageList();
28204         }
28205       }
28206       /**
28207        * <code>repeated .Action action = 3;</code>
28208        */
28209       public int getActionCount() {
28210         if (actionBuilder_ == null) {
28211           return action_.size();
28212         } else {
28213           return actionBuilder_.getCount();
28214         }
28215       }
28216       /**
28217        * <code>repeated .Action action = 3;</code>
28218        */
28219       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action getAction(int index) {
28220         if (actionBuilder_ == null) {
28221           return action_.get(index);
28222         } else {
28223           return actionBuilder_.getMessage(index);
28224         }
28225       }
28226       /**
28227        * <code>repeated .Action action = 3;</code>
28228        */
28229       public Builder setAction(
28230           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
28231         if (actionBuilder_ == null) {
28232           if (value == null) {
28233             throw new NullPointerException();
28234           }
28235           ensureActionIsMutable();
28236           action_.set(index, value);
28237           onChanged();
28238         } else {
28239           actionBuilder_.setMessage(index, value);
28240         }
28241         return this;
28242       }
28243       /**
28244        * <code>repeated .Action action = 3;</code>
28245        */
28246       public Builder setAction(
28247           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
28248         if (actionBuilder_ == null) {
28249           ensureActionIsMutable();
28250           action_.set(index, builderForValue.build());
28251           onChanged();
28252         } else {
28253           actionBuilder_.setMessage(index, builderForValue.build());
28254         }
28255         return this;
28256       }
28257       /**
28258        * <code>repeated .Action action = 3;</code>
28259        */
28260       public Builder addAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
28261         if (actionBuilder_ == null) {
28262           if (value == null) {
28263             throw new NullPointerException();
28264           }
28265           ensureActionIsMutable();
28266           action_.add(value);
28267           onChanged();
28268         } else {
28269           actionBuilder_.addMessage(value);
28270         }
28271         return this;
28272       }
28273       /**
28274        * <code>repeated .Action action = 3;</code>
28275        */
28276       public Builder addAction(
28277           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action value) {
28278         if (actionBuilder_ == null) {
28279           if (value == null) {
28280             throw new NullPointerException();
28281           }
28282           ensureActionIsMutable();
28283           action_.add(index, value);
28284           onChanged();
28285         } else {
28286           actionBuilder_.addMessage(index, value);
28287         }
28288         return this;
28289       }
28290       /**
28291        * <code>repeated .Action action = 3;</code>
28292        */
28293       public Builder addAction(
28294           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
28295         if (actionBuilder_ == null) {
28296           ensureActionIsMutable();
28297           action_.add(builderForValue.build());
28298           onChanged();
28299         } else {
28300           actionBuilder_.addMessage(builderForValue.build());
28301         }
28302         return this;
28303       }
28304       /**
28305        * <code>repeated .Action action = 3;</code>
28306        */
28307       public Builder addAction(
28308           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder builderForValue) {
28309         if (actionBuilder_ == null) {
28310           ensureActionIsMutable();
28311           action_.add(index, builderForValue.build());
28312           onChanged();
28313         } else {
28314           actionBuilder_.addMessage(index, builderForValue.build());
28315         }
28316         return this;
28317       }
28318       /**
28319        * <code>repeated .Action action = 3;</code>
28320        */
28321       public Builder addAllAction(
28322           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action> values) {
28323         if (actionBuilder_ == null) {
28324           ensureActionIsMutable();
28325           super.addAll(values, action_);
28326           onChanged();
28327         } else {
28328           actionBuilder_.addAllMessages(values);
28329         }
28330         return this;
28331       }
28332       /**
28333        * <code>repeated .Action action = 3;</code>
28334        */
28335       public Builder clearAction() {
28336         if (actionBuilder_ == null) {
28337           action_ = java.util.Collections.emptyList();
28338           bitField0_ = (bitField0_ & ~0x00000004);
28339           onChanged();
28340         } else {
28341           actionBuilder_.clear();
28342         }
28343         return this;
28344       }
28345       /**
28346        * <code>repeated .Action action = 3;</code>
28347        */
28348       public Builder removeAction(int index) {
28349         if (actionBuilder_ == null) {
28350           ensureActionIsMutable();
28351           action_.remove(index);
28352           onChanged();
28353         } else {
28354           actionBuilder_.remove(index);
28355         }
28356         return this;
28357       }
28358       /**
28359        * <code>repeated .Action action = 3;</code>
28360        */
28361       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder getActionBuilder(
28362           int index) {
28363         return getActionFieldBuilder().getBuilder(index);
28364       }
28365       /**
28366        * <code>repeated .Action action = 3;</code>
28367        */
28368       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder getActionOrBuilder(
28369           int index) {
28370         if (actionBuilder_ == null) {
28371           return action_.get(index);  } else {
28372           return actionBuilder_.getMessageOrBuilder(index);
28373         }
28374       }
28375       /**
28376        * <code>repeated .Action action = 3;</code>
28377        */
28378       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>
28379            getActionOrBuilderList() {
28380         if (actionBuilder_ != null) {
28381           return actionBuilder_.getMessageOrBuilderList();
28382         } else {
28383           return java.util.Collections.unmodifiableList(action_);
28384         }
28385       }
28386       /**
28387        * <code>repeated .Action action = 3;</code>
28388        */
28389       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder() {
28390         return getActionFieldBuilder().addBuilder(
28391             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
28392       }
28393       /**
28394        * <code>repeated .Action action = 3;</code>
28395        */
28396       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder addActionBuilder(
28397           int index) {
28398         return getActionFieldBuilder().addBuilder(
28399             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.getDefaultInstance());
28400       }
28401       /**
28402        * <code>repeated .Action action = 3;</code>
28403        */
28404       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder>
28405            getActionBuilderList() {
28406         return getActionFieldBuilder().getBuilderList();
28407       }
28408       private com.google.protobuf.RepeatedFieldBuilder<
28409           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>
28410           getActionFieldBuilder() {
28411         if (actionBuilder_ == null) {
28412           actionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
28413               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Action.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionOrBuilder>(
28414                   action_,
28415                   ((bitField0_ & 0x00000004) == 0x00000004),
28416                   getParentForChildren(),
28417                   isClean());
28418           action_ = null;
28419         }
28420         return actionBuilder_;
28421       }
28422 
28423       // @@protoc_insertion_point(builder_scope:RegionAction)
28424     }
28425 
28426     static {
28427       defaultInstance = new RegionAction(true);
28428       defaultInstance.initFields();
28429     }
28430 
28431     // @@protoc_insertion_point(class_scope:RegionAction)
28432   }
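
  // Hand-written illustration, not emitted by protoc: a minimal sketch of how the repeated
  // "action" accessors on RegionAction.Builder above are typically used. The helper name is
  // ours; it sticks to default Action instances and buildPartial() so it does not assume
  // which RegionAction or Action fields are required.
  private static RegionAction exampleRegionAction() {
    RegionAction.Builder builder = RegionAction.newBuilder();
    // Append a single element to the repeated field.
    builder.addAction(Action.getDefaultInstance());
    // Append several elements at once.
    builder.addAllAction(java.util.Arrays.asList(
        Action.getDefaultInstance(), Action.getDefaultInstance()));
    // Or obtain a nested Action.Builder that stays wired into this parent builder;
    // mutations made through it are reflected when the parent is built.
    Action.Builder nested = builder.addActionBuilder();
    // buildPartial() skips the required-field check, keeping the sketch self-contained.
    return builder.buildPartial();
  }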
28433 
28434   public interface RegionLoadStatsOrBuilder
28435       extends com.google.protobuf.MessageOrBuilder {
28436 
28437     // optional int32 memstoreLoad = 1 [default = 0];
28438     /**
28439      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
28440      *
28441      * <pre>
28442      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
28443      * </pre>
28444      */
28445     boolean hasMemstoreLoad();
28446     /**
28447      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
28448      *
28449      * <pre>
28450      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
28451      * </pre>
28452      */
28453     int getMemstoreLoad();
28454 
28455     // optional int32 heapOccupancy = 2 [default = 0];
28456     /**
28457      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28458      *
28459      * <pre>
28460      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28461      * We can move this to "ServerLoadStats" should we develop them.
28462      * </pre>
28463      */
28464     boolean hasHeapOccupancy();
28465     /**
28466      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28467      *
28468      * <pre>
28469      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28470      * We can move this to "ServerLoadStats" should we develop them.
28471      * </pre>
28472      */
28473     int getHeapOccupancy();
28474 
28475     // optional int32 compactionPressure = 3 [default = 0];
28476     /**
28477      * <code>optional int32 compactionPressure = 3 [default = 0];</code>
28478      *
28479      * <pre>
28480      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
28481      * </pre>
28482      */
28483     boolean hasCompactionPressure();
28484     /**
28485      * <code>optional int32 compactionPressure = 3 [default = 0];</code>
28486      *
28487      * <pre>
28488      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
28489      * </pre>
28490      */
28491     int getCompactionPressure();
28492   }
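
  // Hand-written illustration, not emitted by protoc: RegionLoadStatsOrBuilder above is the
  // read-only view shared by the RegionLoadStats message and its Builder. A caller inspecting
  // the reported percentages might look like this sketch (the helper name is ours).
  private static int exampleMaxLoadPercent(RegionLoadStatsOrBuilder stats) {
    int max = 0;
    if (stats.hasMemstoreLoad()) {
      max = Math.max(max, stats.getMemstoreLoad());
    }
    if (stats.hasHeapOccupancy()) {
      max = Math.max(max, stats.getHeapOccupancy());
    }
    if (stats.hasCompactionPressure()) {
      max = Math.max(max, stats.getCompactionPressure());
    }
    return max;  // each field is documented as a percentage between 0 and 100
  }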
28493   /**
28494    * Protobuf type {@code RegionLoadStats}
28495    *
28496    * <pre>
28497    *
28498    * Statistics about the current load on the region
28499    * </pre>
28500    */
28501   public static final class RegionLoadStats extends
28502       com.google.protobuf.GeneratedMessage
28503       implements RegionLoadStatsOrBuilder {
28504     // Use RegionLoadStats.newBuilder() to construct.
28505     private RegionLoadStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
28506       super(builder);
28507       this.unknownFields = builder.getUnknownFields();
28508     }
28509     private RegionLoadStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
28510 
28511     private static final RegionLoadStats defaultInstance;
28512     public static RegionLoadStats getDefaultInstance() {
28513       return defaultInstance;
28514     }
28515 
28516     public RegionLoadStats getDefaultInstanceForType() {
28517       return defaultInstance;
28518     }
28519 
28520     private final com.google.protobuf.UnknownFieldSet unknownFields;
28521     @java.lang.Override
28522     public final com.google.protobuf.UnknownFieldSet
28523         getUnknownFields() {
28524       return this.unknownFields;
28525     }
28526     private RegionLoadStats(
28527         com.google.protobuf.CodedInputStream input,
28528         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28529         throws com.google.protobuf.InvalidProtocolBufferException {
28530       initFields();
28531       int mutable_bitField0_ = 0;
28532       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
28533           com.google.protobuf.UnknownFieldSet.newBuilder();
28534       try {
28535         boolean done = false;
28536         while (!done) {
28537           int tag = input.readTag();
28538           switch (tag) {
28539             case 0:
28540               done = true;
28541               break;
28542             default: {
28543               if (!parseUnknownField(input, unknownFields,
28544                                      extensionRegistry, tag)) {
28545                 done = true;
28546               }
28547               break;
28548             }
28549             case 8: {
28550               bitField0_ |= 0x00000001;
28551               memstoreLoad_ = input.readInt32();
28552               break;
28553             }
28554             case 16: {
28555               bitField0_ |= 0x00000002;
28556               heapOccupancy_ = input.readInt32();
28557               break;
28558             }
28559             case 24: {
28560               bitField0_ |= 0x00000004;
28561               compactionPressure_ = input.readInt32();
28562               break;
28563             }
28564           }
28565         }
28566       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28567         throw e.setUnfinishedMessage(this);
28568       } catch (java.io.IOException e) {
28569         throw new com.google.protobuf.InvalidProtocolBufferException(
28570             e.getMessage()).setUnfinishedMessage(this);
28571       } finally {
28572         this.unknownFields = unknownFields.build();
28573         makeExtensionsImmutable();
28574       }
28575     }
28576     public static final com.google.protobuf.Descriptors.Descriptor
28577         getDescriptor() {
28578       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_descriptor;
28579     }
28580 
28581     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
28582         internalGetFieldAccessorTable() {
28583       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_fieldAccessorTable
28584           .ensureFieldAccessorsInitialized(
28585               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
28586     }
28587 
28588     public static com.google.protobuf.Parser<RegionLoadStats> PARSER =
28589         new com.google.protobuf.AbstractParser<RegionLoadStats>() {
28590       public RegionLoadStats parsePartialFrom(
28591           com.google.protobuf.CodedInputStream input,
28592           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28593           throws com.google.protobuf.InvalidProtocolBufferException {
28594         return new RegionLoadStats(input, extensionRegistry);
28595       }
28596     };
28597 
28598     @java.lang.Override
28599     public com.google.protobuf.Parser<RegionLoadStats> getParserForType() {
28600       return PARSER;
28601     }
28602 
28603     private int bitField0_;
28604     // optional int32 memstoreLoad = 1 [default = 0];
28605     public static final int MEMSTORELOAD_FIELD_NUMBER = 1;
28606     private int memstoreLoad_;
28607     /**
28608      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
28609      *
28610      * <pre>
28611      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
28612      * </pre>
28613      */
28614     public boolean hasMemstoreLoad() {
28615       return ((bitField0_ & 0x00000001) == 0x00000001);
28616     }
28617     /**
28618      * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
28619      *
28620      * <pre>
28621      * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
28622      * </pre>
28623      */
28624     public int getMemstoreLoad() {
28625       return memstoreLoad_;
28626     }
28627 
28628     // optional int32 heapOccupancy = 2 [default = 0];
28629     public static final int HEAPOCCUPANCY_FIELD_NUMBER = 2;
28630     private int heapOccupancy_;
28631     /**
28632      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28633      *
28634      * <pre>
28635      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28636      * We can move this to "ServerLoadStats" should we develop them.
28637      * </pre>
28638      */
28639     public boolean hasHeapOccupancy() {
28640       return ((bitField0_ & 0x00000002) == 0x00000002);
28641     }
28642     /**
28643      * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
28644      *
28645      * <pre>
28646      * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
28647      * We can move this to "ServerLoadStats" should we develop them.
28648      * </pre>
28649      */
28650     public int getHeapOccupancy() {
28651       return heapOccupancy_;
28652     }
28653 
28654     // optional int32 compactionPressure = 3 [default = 0];
28655     public static final int COMPACTIONPRESSURE_FIELD_NUMBER = 3;
28656     private int compactionPressure_;
28657     /**
28658      * <code>optional int32 compactionPressure = 3 [default = 0];</code>
28659      *
28660      * <pre>
28661      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
28662      * </pre>
28663      */
28664     public boolean hasCompactionPressure() {
28665       return ((bitField0_ & 0x00000004) == 0x00000004);
28666     }
28667     /**
28668      * <code>optional int32 compactionPressure = 3 [default = 0];</code>
28669      *
28670      * <pre>
28671      * Compaction pressure. Guaranteed to be positive, between 0 and 100.
28672      * </pre>
28673      */
28674     public int getCompactionPressure() {
28675       return compactionPressure_;
28676     }
28677 
28678     private void initFields() {
28679       memstoreLoad_ = 0;
28680       heapOccupancy_ = 0;
28681       compactionPressure_ = 0;
28682     }
28683     private byte memoizedIsInitialized = -1;
28684     public final boolean isInitialized() {
28685       byte isInitialized = memoizedIsInitialized;
28686       if (isInitialized != -1) return isInitialized == 1;
28687 
28688       memoizedIsInitialized = 1;
28689       return true;
28690     }
28691 
28692     public void writeTo(com.google.protobuf.CodedOutputStream output)
28693                         throws java.io.IOException {
28694       getSerializedSize();
28695       if (((bitField0_ & 0x00000001) == 0x00000001)) {
28696         output.writeInt32(1, memstoreLoad_);
28697       }
28698       if (((bitField0_ & 0x00000002) == 0x00000002)) {
28699         output.writeInt32(2, heapOccupancy_);
28700       }
28701       if (((bitField0_ & 0x00000004) == 0x00000004)) {
28702         output.writeInt32(3, compactionPressure_);
28703       }
28704       getUnknownFields().writeTo(output);
28705     }
28706 
28707     private int memoizedSerializedSize = -1;
28708     public int getSerializedSize() {
28709       int size = memoizedSerializedSize;
28710       if (size != -1) return size;
28711 
28712       size = 0;
28713       if (((bitField0_ & 0x00000001) == 0x00000001)) {
28714         size += com.google.protobuf.CodedOutputStream
28715           .computeInt32Size(1, memstoreLoad_);
28716       }
28717       if (((bitField0_ & 0x00000002) == 0x00000002)) {
28718         size += com.google.protobuf.CodedOutputStream
28719           .computeInt32Size(2, heapOccupancy_);
28720       }
28721       if (((bitField0_ & 0x00000004) == 0x00000004)) {
28722         size += com.google.protobuf.CodedOutputStream
28723           .computeInt32Size(3, compactionPressure_);
28724       }
28725       size += getUnknownFields().getSerializedSize();
28726       memoizedSerializedSize = size;
28727       return size;
28728     }
28729 
28730     private static final long serialVersionUID = 0L;
28731     @java.lang.Override
28732     protected java.lang.Object writeReplace()
28733         throws java.io.ObjectStreamException {
28734       return super.writeReplace();
28735     }
28736 
28737     @java.lang.Override
28738     public boolean equals(final java.lang.Object obj) {
28739       if (obj == this) {
28740        return true;
28741       }
28742       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)) {
28743         return super.equals(obj);
28744       }
28745       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) obj;
28746 
28747       boolean result = true;
28748       result = result && (hasMemstoreLoad() == other.hasMemstoreLoad());
28749       if (hasMemstoreLoad()) {
28750         result = result && (getMemstoreLoad()
28751             == other.getMemstoreLoad());
28752       }
28753       result = result && (hasHeapOccupancy() == other.hasHeapOccupancy());
28754       if (hasHeapOccupancy()) {
28755         result = result && (getHeapOccupancy()
28756             == other.getHeapOccupancy());
28757       }
28758       result = result && (hasCompactionPressure() == other.hasCompactionPressure());
28759       if (hasCompactionPressure()) {
28760         result = result && (getCompactionPressure()
28761             == other.getCompactionPressure());
28762       }
28763       result = result &&
28764           getUnknownFields().equals(other.getUnknownFields());
28765       return result;
28766     }
28767 
28768     private int memoizedHashCode = 0;
28769     @java.lang.Override
28770     public int hashCode() {
28771       if (memoizedHashCode != 0) {
28772         return memoizedHashCode;
28773       }
28774       int hash = 41;
28775       hash = (19 * hash) + getDescriptorForType().hashCode();
28776       if (hasMemstoreLoad()) {
28777         hash = (37 * hash) + MEMSTORELOAD_FIELD_NUMBER;
28778         hash = (53 * hash) + getMemstoreLoad();
28779       }
28780       if (hasHeapOccupancy()) {
28781         hash = (37 * hash) + HEAPOCCUPANCY_FIELD_NUMBER;
28782         hash = (53 * hash) + getHeapOccupancy();
28783       }
28784       if (hasCompactionPressure()) {
28785         hash = (37 * hash) + COMPACTIONPRESSURE_FIELD_NUMBER;
28786         hash = (53 * hash) + getCompactionPressure();
28787       }
28788       hash = (29 * hash) + getUnknownFields().hashCode();
28789       memoizedHashCode = hash;
28790       return hash;
28791     }
28792 
28793     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
28794         com.google.protobuf.ByteString data)
28795         throws com.google.protobuf.InvalidProtocolBufferException {
28796       return PARSER.parseFrom(data);
28797     }
28798     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
28799         com.google.protobuf.ByteString data,
28800         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28801         throws com.google.protobuf.InvalidProtocolBufferException {
28802       return PARSER.parseFrom(data, extensionRegistry);
28803     }
28804     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(byte[] data)
28805         throws com.google.protobuf.InvalidProtocolBufferException {
28806       return PARSER.parseFrom(data);
28807     }
28808     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
28809         byte[] data,
28810         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28811         throws com.google.protobuf.InvalidProtocolBufferException {
28812       return PARSER.parseFrom(data, extensionRegistry);
28813     }
28814     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(java.io.InputStream input)
28815         throws java.io.IOException {
28816       return PARSER.parseFrom(input);
28817     }
28818     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
28819         java.io.InputStream input,
28820         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28821         throws java.io.IOException {
28822       return PARSER.parseFrom(input, extensionRegistry);
28823     }
28824     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(java.io.InputStream input)
28825         throws java.io.IOException {
28826       return PARSER.parseDelimitedFrom(input);
28827     }
28828     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseDelimitedFrom(
28829         java.io.InputStream input,
28830         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28831         throws java.io.IOException {
28832       return PARSER.parseDelimitedFrom(input, extensionRegistry);
28833     }
28834     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
28835         com.google.protobuf.CodedInputStream input)
28836         throws java.io.IOException {
28837       return PARSER.parseFrom(input);
28838     }
28839     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parseFrom(
28840         com.google.protobuf.CodedInputStream input,
28841         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28842         throws java.io.IOException {
28843       return PARSER.parseFrom(input, extensionRegistry);
28844     }
28845 
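
    // Hand-written illustration, not emitted by protoc: a serialize/parse round trip through
    // the generated parseFrom() overloads above (the helper name and field values are ours).
    private static RegionLoadStats exampleRoundTrip()
        throws com.google.protobuf.InvalidProtocolBufferException {
      RegionLoadStats original = newBuilder()
          .setMemstoreLoad(30)
          .setHeapOccupancy(55)
          .build();
      byte[] wire = original.toByteArray();  // inherited GeneratedMessage serialization
      RegionLoadStats reparsed = parseFrom(wire);
      // reparsed.equals(original) holds, since equals() compares field presence and values.
      return reparsed;
    }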
28846     public static Builder newBuilder() { return Builder.create(); }
28847     public Builder newBuilderForType() { return newBuilder(); }
28848     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats prototype) {
28849       return newBuilder().mergeFrom(prototype);
28850     }
28851     public Builder toBuilder() { return newBuilder(this); }
28852 
28853     @java.lang.Override
28854     protected Builder newBuilderForType(
28855         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28856       Builder builder = new Builder(parent);
28857       return builder;
28858     }
28859     /**
28860      * Protobuf type {@code RegionLoadStats}
28861      *
28862      * <pre>
28863      *
28864      * Statistics about the current load on the region
28865      * </pre>
28866      */
28867     public static final class Builder extends
28868         com.google.protobuf.GeneratedMessage.Builder<Builder>
28869        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder {
28870       public static final com.google.protobuf.Descriptors.Descriptor
28871           getDescriptor() {
28872         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_descriptor;
28873       }
28874 
28875       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
28876           internalGetFieldAccessorTable() {
28877         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_fieldAccessorTable
28878             .ensureFieldAccessorsInitialized(
28879                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder.class);
28880       }
28881 
28882       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder()
28883       private Builder() {
28884         maybeForceBuilderInitialization();
28885       }
28886 
28887       private Builder(
28888           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
28889         super(parent);
28890         maybeForceBuilderInitialization();
28891       }
28892       private void maybeForceBuilderInitialization() {
28893         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
28894         }
28895       }
28896       private static Builder create() {
28897         return new Builder();
28898       }
28899 
28900       public Builder clear() {
28901         super.clear();
28902         memstoreLoad_ = 0;
28903         bitField0_ = (bitField0_ & ~0x00000001);
28904         heapOccupancy_ = 0;
28905         bitField0_ = (bitField0_ & ~0x00000002);
28906         compactionPressure_ = 0;
28907         bitField0_ = (bitField0_ & ~0x00000004);
28908         return this;
28909       }
28910 
28911       public Builder clone() {
28912         return create().mergeFrom(buildPartial());
28913       }
28914 
28915       public com.google.protobuf.Descriptors.Descriptor
28916           getDescriptorForType() {
28917         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionLoadStats_descriptor;
28918       }
28919 
28920       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getDefaultInstanceForType() {
28921         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
28922       }
28923 
28924       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats build() {
28925         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = buildPartial();
28926         if (!result.isInitialized()) {
28927           throw newUninitializedMessageException(result);
28928         }
28929         return result;
28930       }
28931 
28932       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats buildPartial() {
28933         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats(this);
28934         int from_bitField0_ = bitField0_;
28935         int to_bitField0_ = 0;
28936         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
28937           to_bitField0_ |= 0x00000001;
28938         }
28939         result.memstoreLoad_ = memstoreLoad_;
28940         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
28941           to_bitField0_ |= 0x00000002;
28942         }
28943         result.heapOccupancy_ = heapOccupancy_;
28944         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
28945           to_bitField0_ |= 0x00000004;
28946         }
28947         result.compactionPressure_ = compactionPressure_;
28948         result.bitField0_ = to_bitField0_;
28949         onBuilt();
28950         return result;
28951       }
28952 
28953       public Builder mergeFrom(com.google.protobuf.Message other) {
28954         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) {
28955           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats)other);
28956         } else {
28957           super.mergeFrom(other);
28958           return this;
28959         }
28960       }
28961 
28962       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats other) {
28963         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) return this;
28964         if (other.hasMemstoreLoad()) {
28965           setMemstoreLoad(other.getMemstoreLoad());
28966         }
28967         if (other.hasHeapOccupancy()) {
28968           setHeapOccupancy(other.getHeapOccupancy());
28969         }
28970         if (other.hasCompactionPressure()) {
28971           setCompactionPressure(other.getCompactionPressure());
28972         }
28973         this.mergeUnknownFields(other.getUnknownFields());
28974         return this;
28975       }
28976 
28977       public final boolean isInitialized() {
28978         return true;
28979       }
28980 
28981       public Builder mergeFrom(
28982           com.google.protobuf.CodedInputStream input,
28983           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
28984           throws java.io.IOException {
28985         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats parsedMessage = null;
28986         try {
28987           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
28988         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
28989           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats) e.getUnfinishedMessage();
28990           throw e;
28991         } finally {
28992           if (parsedMessage != null) {
28993             mergeFrom(parsedMessage);
28994           }
28995         }
28996         return this;
28997       }
28998       private int bitField0_;
28999 
29000       // optional int32 memstoreLoad = 1 [default = 0];
29001       private int memstoreLoad_ ;
29002       /**
29003        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
29004        *
29005        * <pre>
29006        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
29007        * </pre>
29008        */
29009       public boolean hasMemstoreLoad() {
29010         return ((bitField0_ & 0x00000001) == 0x00000001);
29011       }
29012       /**
29013        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
29014        *
29015        * <pre>
29016        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
29017        * </pre>
29018        */
29019       public int getMemstoreLoad() {
29020         return memstoreLoad_;
29021       }
29022       /**
29023        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
29024        *
29025        * <pre>
29026        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
29027        * </pre>
29028        */
29029       public Builder setMemstoreLoad(int value) {
29030         bitField0_ |= 0x00000001;
29031         memstoreLoad_ = value;
29032         onChanged();
29033         return this;
29034       }
29035       /**
29036        * <code>optional int32 memstoreLoad = 1 [default = 0];</code>
29037        *
29038        * <pre>
29039        * Percent load on the memstore. Guaranteed to be positive, between 0 and 100.
29040        * </pre>
29041        */
29042       public Builder clearMemstoreLoad() {
29043         bitField0_ = (bitField0_ & ~0x00000001);
29044         memstoreLoad_ = 0;
29045         onChanged();
29046         return this;
29047       }
29048 
29049       // optional int32 heapOccupancy = 2 [default = 0];
29050       private int heapOccupancy_ ;
29051       /**
29052        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
29053        *
29054        * <pre>
29055        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
29056        * We can move this to "ServerLoadStats" should we develop them.
29057        * </pre>
29058        */
29059       public boolean hasHeapOccupancy() {
29060         return ((bitField0_ & 0x00000002) == 0x00000002);
29061       }
29062       /**
29063        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
29064        *
29065        * <pre>
29066        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
29067        * We can move this to "ServerLoadStats" should we develop them.
29068        * </pre>
29069        */
29070       public int getHeapOccupancy() {
29071         return heapOccupancy_;
29072       }
29073       /**
29074        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
29075        *
29076        * <pre>
29077        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
29078        * We can move this to "ServerLoadStats" should we develop them.
29079        * </pre>
29080        */
29081       public Builder setHeapOccupancy(int value) {
29082         bitField0_ |= 0x00000002;
29083         heapOccupancy_ = value;
29084         onChanged();
29085         return this;
29086       }
29087       /**
29088        * <code>optional int32 heapOccupancy = 2 [default = 0];</code>
29089        *
29090        * <pre>
29091        * Percent JVM heap occupancy. Guaranteed to be positive, between 0 and 100.
29092        * We can move this to "ServerLoadStats" should we develop them.
29093        * </pre>
29094        */
29095       public Builder clearHeapOccupancy() {
29096         bitField0_ = (bitField0_ & ~0x00000002);
29097         heapOccupancy_ = 0;
29098         onChanged();
29099         return this;
29100       }
29101 
29102       // optional int32 compactionPressure = 3 [default = 0];
29103       private int compactionPressure_ ;
29104       /**
29105        * <code>optional int32 compactionPressure = 3 [default = 0];</code>
29106        *
29107        * <pre>
29108        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
29109        * </pre>
29110        */
29111       public boolean hasCompactionPressure() {
29112         return ((bitField0_ & 0x00000004) == 0x00000004);
29113       }
29114       /**
29115        * <code>optional int32 compactionPressure = 3 [default = 0];</code>
29116        *
29117        * <pre>
29118        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
29119        * </pre>
29120        */
29121       public int getCompactionPressure() {
29122         return compactionPressure_;
29123       }
29124       /**
29125        * <code>optional int32 compactionPressure = 3 [default = 0];</code>
29126        *
29127        * <pre>
29128        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
29129        * </pre>
29130        */
29131       public Builder setCompactionPressure(int value) {
29132         bitField0_ |= 0x00000004;
29133         compactionPressure_ = value;
29134         onChanged();
29135         return this;
29136       }
29137       /**
29138        * <code>optional int32 compactionPressure = 3 [default = 0];</code>
29139        *
29140        * <pre>
29141        * Compaction pressure. Guaranteed to be positive, between 0 and 100.
29142        * </pre>
29143        */
29144       public Builder clearCompactionPressure() {
29145         bitField0_ = (bitField0_ & ~0x00000004);
29146         compactionPressure_ = 0;
29147         onChanged();
29148         return this;
29149       }
29150 
29151       // @@protoc_insertion_point(builder_scope:RegionLoadStats)
29152     }
29153 
29154     static {
29155       defaultInstance = new RegionLoadStats(true);
29156       defaultInstance.initFields();
29157     }
29158 
29159     // @@protoc_insertion_point(class_scope:RegionLoadStats)
29160   }
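
  // Hand-written illustration, not emitted by protoc: mergeFrom() on the Builder above copies
  // only the fields whose has-bit is set, so fields left unset in "update" keep their current
  // values. The helper name and field values are ours.
  private static RegionLoadStats exampleMergeLoadStats() {
    RegionLoadStats base = RegionLoadStats.newBuilder()
        .setMemstoreLoad(20)
        .setHeapOccupancy(40)
        .build();
    RegionLoadStats update = RegionLoadStats.newBuilder()
        .setHeapOccupancy(70)  // the only field present in "update"
        .build();
    // Result: memstoreLoad stays 20, heapOccupancy becomes 70.
    return base.toBuilder().mergeFrom(update).build();
  }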
29161 
29162   public interface ResultOrExceptionOrBuilder
29163       extends com.google.protobuf.MessageOrBuilder {
29164 
29165     // optional uint32 index = 1;
29166     /**
29167      * <code>optional uint32 index = 1;</code>
29168      *
29169      * <pre>
29170      * If part of a multi call, holds the original index in the list of all
29171      * actions passed, so this response can be aligned with the original request.
29172      * </pre>
29173      */
29174     boolean hasIndex();
29175     /**
29176      * <code>optional uint32 index = 1;</code>
29177      *
29178      * <pre>
29179      * If part of a multi call, holds the original index in the list of all
29180      * actions passed, so this response can be aligned with the original request.
29181      * </pre>
29182      */
29183     int getIndex();
29184 
29185     // optional .Result result = 2;
29186     /**
29187      * <code>optional .Result result = 2;</code>
29188      */
29189     boolean hasResult();
29190     /**
29191      * <code>optional .Result result = 2;</code>
29192      */
29193     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
29194     /**
29195      * <code>optional .Result result = 2;</code>
29196      */
29197     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
29198 
29199     // optional .NameBytesPair exception = 3;
29200     /**
29201      * <code>optional .NameBytesPair exception = 3;</code>
29202      */
29203     boolean hasException();
29204     /**
29205      * <code>optional .NameBytesPair exception = 3;</code>
29206      */
29207     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
29208     /**
29209      * <code>optional .NameBytesPair exception = 3;</code>
29210      */
29211     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
29212 
29213     // optional .CoprocessorServiceResult service_result = 4;
29214     /**
29215      * <code>optional .CoprocessorServiceResult service_result = 4;</code>
29216      *
29217      * <pre>
29218      * result if this was a coprocessor service call
29219      * </pre>
29220      */
29221     boolean hasServiceResult();
29222     /**
29223      * <code>optional .CoprocessorServiceResult service_result = 4;</code>
29224      *
29225      * <pre>
29226      * result if this was a coprocessor service call
29227      * </pre>
29228      */
29229     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult();
29230     /**
29231      * <code>optional .CoprocessorServiceResult service_result = 4;</code>
29232      *
29233      * <pre>
29234      * result if this was a coprocessor service call
29235      * </pre>
29236      */
29237     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder();
29238 
29239     // optional .RegionLoadStats loadStats = 5;
29240     /**
29241      * <code>optional .RegionLoadStats loadStats = 5;</code>
29242      *
29243      * <pre>
29244      * current load on the region
29245      * </pre>
29246      */
29247     boolean hasLoadStats();
29248     /**
29249      * <code>optional .RegionLoadStats loadStats = 5;</code>
29250      *
29251      * <pre>
29252      * current load on the region
29253      * </pre>
29254      */
29255     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats();
29256     /**
29257      * <code>optional .RegionLoadStats loadStats = 5;</code>
29258      *
29259      * <pre>
29260      * current load on the region
29261      * </pre>
29262      */
29263     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder();
29264   }
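
  // Hand-written illustration, not emitted by protoc: as the message comment below says, a
  // ResultOrException carries a Result, an exception NameBytesPair, or neither, so a caller
  // usually branches on the has-methods declared above (the helper name is ours).
  private static String exampleDescribe(ResultOrExceptionOrBuilder roe) {
    if (roe.hasException()) {
      // The NameBytesPair names the remote exception and carries its stringified form.
      return "action " + roe.getIndex() + " failed: " + roe.getException();
    }
    if (roe.hasResult()) {
      return "action " + roe.getIndex() + " succeeded";
    }
    return "action " + roe.getIndex() + " produced no result";
  }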
29265   /**
29266    * Protobuf type {@code ResultOrException}
29267    *
29268    * <pre>
29269    **
29270    * Either a Result or an Exception NameBytesPair (keyed by the exception name,
29271    * with the stringified exception as its value), or possibly empty if there is
29272    * no result and no exception.
29273    * </pre>
29274    */
29275   public static final class ResultOrException extends
29276       com.google.protobuf.GeneratedMessage
29277       implements ResultOrExceptionOrBuilder {
29278     // Use ResultOrException.newBuilder() to construct.
29279     private ResultOrException(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
29280       super(builder);
29281       this.unknownFields = builder.getUnknownFields();
29282     }
29283     private ResultOrException(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
29284 
29285     private static final ResultOrException defaultInstance;
29286     public static ResultOrException getDefaultInstance() {
29287       return defaultInstance;
29288     }
29289 
29290     public ResultOrException getDefaultInstanceForType() {
29291       return defaultInstance;
29292     }
29293 
29294     private final com.google.protobuf.UnknownFieldSet unknownFields;
29295     @java.lang.Override
29296     public final com.google.protobuf.UnknownFieldSet
29297         getUnknownFields() {
29298       return this.unknownFields;
29299     }
29300     private ResultOrException(
29301         com.google.protobuf.CodedInputStream input,
29302         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29303         throws com.google.protobuf.InvalidProtocolBufferException {
29304       initFields();
29305       int mutable_bitField0_ = 0;
29306       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
29307           com.google.protobuf.UnknownFieldSet.newBuilder();
29308       try {
29309         boolean done = false;
29310         while (!done) {
29311           int tag = input.readTag();
29312           switch (tag) {
29313             case 0:
29314               done = true;
29315               break;
29316             default: {
29317               if (!parseUnknownField(input, unknownFields,
29318                                      extensionRegistry, tag)) {
29319                 done = true;
29320               }
29321               break;
29322             }
29323             case 8: {
29324               bitField0_ |= 0x00000001;
29325               index_ = input.readUInt32();
29326               break;
29327             }
29328             case 18: {
29329               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
29330               if (((bitField0_ & 0x00000002) == 0x00000002)) {
29331                 subBuilder = result_.toBuilder();
29332               }
29333               result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
29334               if (subBuilder != null) {
29335                 subBuilder.mergeFrom(result_);
29336                 result_ = subBuilder.buildPartial();
29337               }
29338               bitField0_ |= 0x00000002;
29339               break;
29340             }
29341             case 26: {
29342               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
29343               if (((bitField0_ & 0x00000004) == 0x00000004)) {
29344                 subBuilder = exception_.toBuilder();
29345               }
29346               exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
29347               if (subBuilder != null) {
29348                 subBuilder.mergeFrom(exception_);
29349                 exception_ = subBuilder.buildPartial();
29350               }
29351               bitField0_ |= 0x00000004;
29352               break;
29353             }
29354             case 34: {
29355               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder subBuilder = null;
29356               if (((bitField0_ & 0x00000008) == 0x00000008)) {
29357                 subBuilder = serviceResult_.toBuilder();
29358               }
29359               serviceResult_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.PARSER, extensionRegistry);
29360               if (subBuilder != null) {
29361                 subBuilder.mergeFrom(serviceResult_);
29362                 serviceResult_ = subBuilder.buildPartial();
29363               }
29364               bitField0_ |= 0x00000008;
29365               break;
29366             }
29367             case 42: {
29368               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder subBuilder = null;
29369               if (((bitField0_ & 0x00000010) == 0x00000010)) {
29370                 subBuilder = loadStats_.toBuilder();
29371               }
29372               loadStats_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.PARSER, extensionRegistry);
29373               if (subBuilder != null) {
29374                 subBuilder.mergeFrom(loadStats_);
29375                 loadStats_ = subBuilder.buildPartial();
29376               }
29377               bitField0_ |= 0x00000010;
29378               break;
29379             }
29380           }
29381         }
29382       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
29383         throw e.setUnfinishedMessage(this);
29384       } catch (java.io.IOException e) {
29385         throw new com.google.protobuf.InvalidProtocolBufferException(
29386             e.getMessage()).setUnfinishedMessage(this);
29387       } finally {
29388         this.unknownFields = unknownFields.build();
29389         makeExtensionsImmutable();
29390       }
29391     }
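
    // Hand-written note, not emitted by protoc: the case labels in the parsing loop above are
    // protobuf wire tags, computed as (field_number << 3) | wire_type. With wire type 0 for
    // varints and 2 for length-delimited messages, field 1 ("index") yields tag 8 and fields
    // 2 through 5 yield tags 18, 26, 34 and 42, matching the switch cases above.
    private static int exampleWireTag(int fieldNumber, int wireType) {
      return (fieldNumber << 3) | wireType;
    }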
29392     public static final com.google.protobuf.Descriptors.Descriptor
29393         getDescriptor() {
29394       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor;
29395     }
29396 
29397     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
29398         internalGetFieldAccessorTable() {
29399       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_fieldAccessorTable
29400           .ensureFieldAccessorsInitialized(
29401               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
29402     }
29403 
29404     public static com.google.protobuf.Parser<ResultOrException> PARSER =
29405         new com.google.protobuf.AbstractParser<ResultOrException>() {
29406       public ResultOrException parsePartialFrom(
29407           com.google.protobuf.CodedInputStream input,
29408           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29409           throws com.google.protobuf.InvalidProtocolBufferException {
29410         return new ResultOrException(input, extensionRegistry);
29411       }
29412     };
29413 
29414     @java.lang.Override
29415     public com.google.protobuf.Parser<ResultOrException> getParserForType() {
29416       return PARSER;
29417     }
29418 
29419     private int bitField0_;
29420     // optional uint32 index = 1;
29421     public static final int INDEX_FIELD_NUMBER = 1;
29422     private int index_;
29423     /**
29424      * <code>optional uint32 index = 1;</code>
29425      *
29426      * <pre>
29427      * If part of a multi call, holds the original index in the list of all
29428      * actions passed, so this response can be aligned with the original request.
29429      * </pre>
29430      */
29431     public boolean hasIndex() {
29432       return ((bitField0_ & 0x00000001) == 0x00000001);
29433     }
29434     /**
29435      * <code>optional uint32 index = 1;</code>
29436      *
29437      * <pre>
29438      * If part of a multi call, holds the original index in the list of all
29439      * actions passed, so this response can be aligned with the original request.
29440      * </pre>
29441      */
29442     public int getIndex() {
29443       return index_;
29444     }
29445 
29446     // optional .Result result = 2;
29447     public static final int RESULT_FIELD_NUMBER = 2;
29448     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
29449     /**
29450      * <code>optional .Result result = 2;</code>
29451      */
29452     public boolean hasResult() {
29453       return ((bitField0_ & 0x00000002) == 0x00000002);
29454     }
29455     /**
29456      * <code>optional .Result result = 2;</code>
29457      */
29458     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
29459       return result_;
29460     }
29461     /**
29462      * <code>optional .Result result = 2;</code>
29463      */
29464     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
29465       return result_;
29466     }
29467 
29468     // optional .NameBytesPair exception = 3;
29469     public static final int EXCEPTION_FIELD_NUMBER = 3;
29470     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
29471     /**
29472      * <code>optional .NameBytesPair exception = 3;</code>
29473      */
29474     public boolean hasException() {
29475       return ((bitField0_ & 0x00000004) == 0x00000004);
29476     }
29477     /**
29478      * <code>optional .NameBytesPair exception = 3;</code>
29479      */
29480     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
29481       return exception_;
29482     }
29483     /**
29484      * <code>optional .NameBytesPair exception = 3;</code>
29485      */
29486     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
29487       return exception_;
29488     }
29489 
29490     // optional .CoprocessorServiceResult service_result = 4;
29491     public static final int SERVICE_RESULT_FIELD_NUMBER = 4;
29492     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_;
29493     /**
29494      * <code>optional .CoprocessorServiceResult service_result = 4;</code>
29495      *
29496      * <pre>
29497      * result if this was a coprocessor service call
29498      * </pre>
29499      */
29500     public boolean hasServiceResult() {
29501       return ((bitField0_ & 0x00000008) == 0x00000008);
29502     }
29503     /**
29504      * <code>optional .CoprocessorServiceResult service_result = 4;</code>
29505      *
29506      * <pre>
29507      * result if this was a coprocessor service call
29508      * </pre>
29509      */
29510     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
29511       return serviceResult_;
29512     }
29513     /**
29514      * <code>optional .CoprocessorServiceResult service_result = 4;</code>
29515      *
29516      * <pre>
29517      * result if this was a coprocessor service call
29518      * </pre>
29519      */
29520     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
29521       return serviceResult_;
29522     }
29523 
29524     // optional .RegionLoadStats loadStats = 5;
29525     public static final int LOADSTATS_FIELD_NUMBER = 5;
29526     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_;
29527     /**
29528      * <code>optional .RegionLoadStats loadStats = 5;</code>
29529      *
29530      * <pre>
29531      * current load on the region
29532      * </pre>
29533      */
29534     public boolean hasLoadStats() {
29535       return ((bitField0_ & 0x00000010) == 0x00000010);
29536     }
29537     /**
29538      * <code>optional .RegionLoadStats loadStats = 5;</code>
29539      *
29540      * <pre>
29541      * current load on the region
29542      * </pre>
29543      */
29544     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
29545       return loadStats_;
29546     }
29547     /**
29548      * <code>optional .RegionLoadStats loadStats = 5;</code>
29549      *
29550      * <pre>
29551      * current load on the region
29552      * </pre>
29553      */
29554     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
29555       return loadStats_;
29556     }
29557 
29558     private void initFields() {
29559       index_ = 0;
29560       result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
29561       exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
29562       serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
29563       loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
29564     }
29565     private byte memoizedIsInitialized = -1;
29566     public final boolean isInitialized() {
29567       byte isInitialized = memoizedIsInitialized;
29568       if (isInitialized != -1) return isInitialized == 1;
29569 
29570       if (hasException()) {
29571         if (!getException().isInitialized()) {
29572           memoizedIsInitialized = 0;
29573           return false;
29574         }
29575       }
29576       if (hasServiceResult()) {
29577         if (!getServiceResult().isInitialized()) {
29578           memoizedIsInitialized = 0;
29579           return false;
29580         }
29581       }
29582       memoizedIsInitialized = 1;
29583       return true;
29584     }
29585 
29586     public void writeTo(com.google.protobuf.CodedOutputStream output)
29587                         throws java.io.IOException {
29588       getSerializedSize();
29589       if (((bitField0_ & 0x00000001) == 0x00000001)) {
29590         output.writeUInt32(1, index_);
29591       }
29592       if (((bitField0_ & 0x00000002) == 0x00000002)) {
29593         output.writeMessage(2, result_);
29594       }
29595       if (((bitField0_ & 0x00000004) == 0x00000004)) {
29596         output.writeMessage(3, exception_);
29597       }
29598       if (((bitField0_ & 0x00000008) == 0x00000008)) {
29599         output.writeMessage(4, serviceResult_);
29600       }
29601       if (((bitField0_ & 0x00000010) == 0x00000010)) {
29602         output.writeMessage(5, loadStats_);
29603       }
29604       getUnknownFields().writeTo(output);
29605     }
29606 
29607     private int memoizedSerializedSize = -1;
29608     public int getSerializedSize() {
29609       int size = memoizedSerializedSize;
29610       if (size != -1) return size;
29611 
29612       size = 0;
29613       if (((bitField0_ & 0x00000001) == 0x00000001)) {
29614         size += com.google.protobuf.CodedOutputStream
29615           .computeUInt32Size(1, index_);
29616       }
29617       if (((bitField0_ & 0x00000002) == 0x00000002)) {
29618         size += com.google.protobuf.CodedOutputStream
29619           .computeMessageSize(2, result_);
29620       }
29621       if (((bitField0_ & 0x00000004) == 0x00000004)) {
29622         size += com.google.protobuf.CodedOutputStream
29623           .computeMessageSize(3, exception_);
29624       }
29625       if (((bitField0_ & 0x00000008) == 0x00000008)) {
29626         size += com.google.protobuf.CodedOutputStream
29627           .computeMessageSize(4, serviceResult_);
29628       }
29629       if (((bitField0_ & 0x00000010) == 0x00000010)) {
29630         size += com.google.protobuf.CodedOutputStream
29631           .computeMessageSize(5, loadStats_);
29632       }
29633       size += getUnknownFields().getSerializedSize();
29634       memoizedSerializedSize = size;
29635       return size;
29636     }
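    // Illustrative note (not generated by protoc): writeTo()/getSerializedSize() emit the
    // standard protobuf wire format, so the inherited MessageLite helpers can be used for
    // serialization. A minimal sketch ("out" is any java.io.OutputStream you supply):
    //
    //   ResultOrException roe = ResultOrException.newBuilder().setIndex(3).build();
    //   byte[] bytes = roe.toByteArray();   // sized via getSerializedSize()
    //   roe.writeDelimitedTo(out);          // length-prefixed framing for streams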
29637 
29638     private static final long serialVersionUID = 0L;
29639     @java.lang.Override
writeReplace()29640     protected java.lang.Object writeReplace()
29641         throws java.io.ObjectStreamException {
29642       return super.writeReplace();
29643     }
29644 
29645     @java.lang.Override
equals(final java.lang.Object obj)29646     public boolean equals(final java.lang.Object obj) {
29647       if (obj == this) {
29648        return true;
29649       }
29650       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)) {
29651         return super.equals(obj);
29652       }
29653       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) obj;
29654 
29655       boolean result = true;
29656       result = result && (hasIndex() == other.hasIndex());
29657       if (hasIndex()) {
29658         result = result && (getIndex()
29659             == other.getIndex());
29660       }
29661       result = result && (hasResult() == other.hasResult());
29662       if (hasResult()) {
29663         result = result && getResult()
29664             .equals(other.getResult());
29665       }
29666       result = result && (hasException() == other.hasException());
29667       if (hasException()) {
29668         result = result && getException()
29669             .equals(other.getException());
29670       }
29671       result = result && (hasServiceResult() == other.hasServiceResult());
29672       if (hasServiceResult()) {
29673         result = result && getServiceResult()
29674             .equals(other.getServiceResult());
29675       }
29676       result = result && (hasLoadStats() == other.hasLoadStats());
29677       if (hasLoadStats()) {
29678         result = result && getLoadStats()
29679             .equals(other.getLoadStats());
29680       }
29681       result = result &&
29682           getUnknownFields().equals(other.getUnknownFields());
29683       return result;
29684     }
29685 
29686     private int memoizedHashCode = 0;
29687     @java.lang.Override
hashCode()29688     public int hashCode() {
29689       if (memoizedHashCode != 0) {
29690         return memoizedHashCode;
29691       }
29692       int hash = 41;
29693       hash = (19 * hash) + getDescriptorForType().hashCode();
29694       if (hasIndex()) {
29695         hash = (37 * hash) + INDEX_FIELD_NUMBER;
29696         hash = (53 * hash) + getIndex();
29697       }
29698       if (hasResult()) {
29699         hash = (37 * hash) + RESULT_FIELD_NUMBER;
29700         hash = (53 * hash) + getResult().hashCode();
29701       }
29702       if (hasException()) {
29703         hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
29704         hash = (53 * hash) + getException().hashCode();
29705       }
29706       if (hasServiceResult()) {
29707         hash = (37 * hash) + SERVICE_RESULT_FIELD_NUMBER;
29708         hash = (53 * hash) + getServiceResult().hashCode();
29709       }
29710       if (hasLoadStats()) {
29711         hash = (37 * hash) + LOADSTATS_FIELD_NUMBER;
29712         hash = (53 * hash) + getLoadStats().hashCode();
29713       }
29714       hash = (29 * hash) + getUnknownFields().hashCode();
29715       memoizedHashCode = hash;
29716       return hash;
29717     }
29718 
parseFrom( com.google.protobuf.ByteString data)29719     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
29720         com.google.protobuf.ByteString data)
29721         throws com.google.protobuf.InvalidProtocolBufferException {
29722       return PARSER.parseFrom(data);
29723     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29724     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
29725         com.google.protobuf.ByteString data,
29726         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29727         throws com.google.protobuf.InvalidProtocolBufferException {
29728       return PARSER.parseFrom(data, extensionRegistry);
29729     }
parseFrom(byte[] data)29730     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(byte[] data)
29731         throws com.google.protobuf.InvalidProtocolBufferException {
29732       return PARSER.parseFrom(data);
29733     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29734     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
29735         byte[] data,
29736         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29737         throws com.google.protobuf.InvalidProtocolBufferException {
29738       return PARSER.parseFrom(data, extensionRegistry);
29739     }
parseFrom(java.io.InputStream input)29740     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(java.io.InputStream input)
29741         throws java.io.IOException {
29742       return PARSER.parseFrom(input);
29743     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29744     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
29745         java.io.InputStream input,
29746         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29747         throws java.io.IOException {
29748       return PARSER.parseFrom(input, extensionRegistry);
29749     }
parseDelimitedFrom(java.io.InputStream input)29750     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(java.io.InputStream input)
29751         throws java.io.IOException {
29752       return PARSER.parseDelimitedFrom(input);
29753     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29754     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseDelimitedFrom(
29755         java.io.InputStream input,
29756         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29757         throws java.io.IOException {
29758       return PARSER.parseDelimitedFrom(input, extensionRegistry);
29759     }
parseFrom( com.google.protobuf.CodedInputStream input)29760     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
29761         com.google.protobuf.CodedInputStream input)
29762         throws java.io.IOException {
29763       return PARSER.parseFrom(input);
29764     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29765     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parseFrom(
29766         com.google.protobuf.CodedInputStream input,
29767         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29768         throws java.io.IOException {
29769       return PARSER.parseFrom(input, extensionRegistry);
29770     }
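    // Illustrative note (not generated by protoc): the parseFrom()/parseDelimitedFrom()
    // overloads above all delegate to PARSER. A minimal round trip, assuming "bytes"
    // holds a previously serialized ResultOrException and that NameBytesPair exposes
    // its required name via getName():
    //
    //   ResultOrException roe = ResultOrException.parseFrom(bytes);
    //   if (roe.hasException()) {
    //     String exceptionClass = roe.getException().getName();
    //   } else if (roe.hasResult()) {
    //     Result r = roe.getResult();
    //   }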
29771 
newBuilder()29772     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()29773     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException prototype)29774     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException prototype) {
29775       return newBuilder().mergeFrom(prototype);
29776     }
toBuilder()29777     public Builder toBuilder() { return newBuilder(this); }
29778 
29779     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)29780     protected Builder newBuilderForType(
29781         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
29782       Builder builder = new Builder(parent);
29783       return builder;
29784     }
29785     /**
29786      * Protobuf type {@code ResultOrException}
29787      *
29788      * <pre>
29789      **
29790      * Either a Result or an Exception NameBytesPair (keyed by the
29791      * exception name, with the stringified exception as its value),
29792      * or possibly empty if there is neither a result nor an exception.
29793      * </pre>
29794      */
29795     public static final class Builder extends
29796         com.google.protobuf.GeneratedMessage.Builder<Builder>
29797        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder {
29798       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()29799           getDescriptor() {
29800         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor;
29801       }
29802 
29803       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()29804           internalGetFieldAccessorTable() {
29805         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_fieldAccessorTable
29806             .ensureFieldAccessorsInitialized(
29807                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder.class);
29808       }
29809 
29810       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.newBuilder()
Builder()29811       private Builder() {
29812         maybeForceBuilderInitialization();
29813       }
29814 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)29815       private Builder(
29816           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
29817         super(parent);
29818         maybeForceBuilderInitialization();
29819       }
maybeForceBuilderInitialization()29820       private void maybeForceBuilderInitialization() {
29821         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
29822           getResultFieldBuilder();
29823           getExceptionFieldBuilder();
29824           getServiceResultFieldBuilder();
29825           getLoadStatsFieldBuilder();
29826         }
29827       }
create()29828       private static Builder create() {
29829         return new Builder();
29830       }
29831 
clear()29832       public Builder clear() {
29833         super.clear();
29834         index_ = 0;
29835         bitField0_ = (bitField0_ & ~0x00000001);
29836         if (resultBuilder_ == null) {
29837           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
29838         } else {
29839           resultBuilder_.clear();
29840         }
29841         bitField0_ = (bitField0_ & ~0x00000002);
29842         if (exceptionBuilder_ == null) {
29843           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
29844         } else {
29845           exceptionBuilder_.clear();
29846         }
29847         bitField0_ = (bitField0_ & ~0x00000004);
29848         if (serviceResultBuilder_ == null) {
29849           serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
29850         } else {
29851           serviceResultBuilder_.clear();
29852         }
29853         bitField0_ = (bitField0_ & ~0x00000008);
29854         if (loadStatsBuilder_ == null) {
29855           loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
29856         } else {
29857           loadStatsBuilder_.clear();
29858         }
29859         bitField0_ = (bitField0_ & ~0x00000010);
29860         return this;
29861       }
29862 
clone()29863       public Builder clone() {
29864         return create().mergeFrom(buildPartial());
29865       }
29866 
29867       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()29868           getDescriptorForType() {
29869         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ResultOrException_descriptor;
29870       }
29871 
getDefaultInstanceForType()29872       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getDefaultInstanceForType() {
29873         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance();
29874       }
29875 
build()29876       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException build() {
29877         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = buildPartial();
29878         if (!result.isInitialized()) {
29879           throw newUninitializedMessageException(result);
29880         }
29881         return result;
29882       }
29883 
buildPartial()29884       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException buildPartial() {
29885         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException(this);
29886         int from_bitField0_ = bitField0_;
29887         int to_bitField0_ = 0;
29888         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
29889           to_bitField0_ |= 0x00000001;
29890         }
29891         result.index_ = index_;
29892         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
29893           to_bitField0_ |= 0x00000002;
29894         }
29895         if (resultBuilder_ == null) {
29896           result.result_ = result_;
29897         } else {
29898           result.result_ = resultBuilder_.build();
29899         }
29900         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
29901           to_bitField0_ |= 0x00000004;
29902         }
29903         if (exceptionBuilder_ == null) {
29904           result.exception_ = exception_;
29905         } else {
29906           result.exception_ = exceptionBuilder_.build();
29907         }
29908         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
29909           to_bitField0_ |= 0x00000008;
29910         }
29911         if (serviceResultBuilder_ == null) {
29912           result.serviceResult_ = serviceResult_;
29913         } else {
29914           result.serviceResult_ = serviceResultBuilder_.build();
29915         }
29916         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
29917           to_bitField0_ |= 0x00000010;
29918         }
29919         if (loadStatsBuilder_ == null) {
29920           result.loadStats_ = loadStats_;
29921         } else {
29922           result.loadStats_ = loadStatsBuilder_.build();
29923         }
29924         result.bitField0_ = to_bitField0_;
29925         onBuilt();
29926         return result;
29927       }
29928 
mergeFrom(com.google.protobuf.Message other)29929       public Builder mergeFrom(com.google.protobuf.Message other) {
29930         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) {
29931           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException)other);
29932         } else {
29933           super.mergeFrom(other);
29934           return this;
29935         }
29936       }
29937 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other)29938       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException other) {
29939         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance()) return this;
29940         if (other.hasIndex()) {
29941           setIndex(other.getIndex());
29942         }
29943         if (other.hasResult()) {
29944           mergeResult(other.getResult());
29945         }
29946         if (other.hasException()) {
29947           mergeException(other.getException());
29948         }
29949         if (other.hasServiceResult()) {
29950           mergeServiceResult(other.getServiceResult());
29951         }
29952         if (other.hasLoadStats()) {
29953           mergeLoadStats(other.getLoadStats());
29954         }
29955         this.mergeUnknownFields(other.getUnknownFields());
29956         return this;
29957       }
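      // Illustrative note (not generated by protoc): mergeFrom(other) copies only the
      // fields that are set on "other", and message-typed fields are merged rather than
      // replaced. A minimal sketch of the resulting semantics:
      //
      //   ResultOrException base  = ResultOrException.newBuilder().setIndex(1).build();
      //   ResultOrException patch = ResultOrException.newBuilder()
      //       .setLoadStats(RegionLoadStats.getDefaultInstance()).build();
      //   ResultOrException merged =
      //       ResultOrException.newBuilder(base).mergeFrom(patch).build();
      //   // merged.getIndex() == 1 and merged.hasLoadStats() == true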
29958 
isInitialized()29959       public final boolean isInitialized() {
29960         if (hasException()) {
29961           if (!getException().isInitialized()) {
29962 
29963             return false;
29964           }
29965         }
29966         if (hasServiceResult()) {
29967           if (!getServiceResult().isInitialized()) {
29968 
29969             return false;
29970           }
29971         }
29972         return true;
29973       }
29974 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)29975       public Builder mergeFrom(
29976           com.google.protobuf.CodedInputStream input,
29977           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
29978           throws java.io.IOException {
29979         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException parsedMessage = null;
29980         try {
29981           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
29982         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
29983           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException) e.getUnfinishedMessage();
29984           throw e;
29985         } finally {
29986           if (parsedMessage != null) {
29987             mergeFrom(parsedMessage);
29988           }
29989         }
29990         return this;
29991       }
29992       private int bitField0_;
29993 
29994       // optional uint32 index = 1;
29995       private int index_ ;
29996       /**
29997        * <code>optional uint32 index = 1;</code>
29998        *
29999        * <pre>
30000        * If part of a multi call, holds the original index within the list of
30001        * actions passed, so this response can be aligned with the original request.
30002        * </pre>
30003        */
hasIndex()30004       public boolean hasIndex() {
30005         return ((bitField0_ & 0x00000001) == 0x00000001);
30006       }
30007       /**
30008        * <code>optional uint32 index = 1;</code>
30009        *
30010        * <pre>
30011        * If part of a multi call, holds the original index within the list of
30012        * actions passed, so this response can be aligned with the original request.
30013        * </pre>
30014        */
getIndex()30015       public int getIndex() {
30016         return index_;
30017       }
30018       /**
30019        * <code>optional uint32 index = 1;</code>
30020        *
30021        * <pre>
30022        * If part of a multi call, holds the original index within the list of
30023        * actions passed, so this response can be aligned with the original request.
30024        * </pre>
30025        */
setIndex(int value)30026       public Builder setIndex(int value) {
30027         bitField0_ |= 0x00000001;
30028         index_ = value;
30029         onChanged();
30030         return this;
30031       }
30032       /**
30033        * <code>optional uint32 index = 1;</code>
30034        *
30035        * <pre>
30036        * If part of a multi call, holds the original index within the list of
30037        * actions passed, so this response can be aligned with the original request.
30038        * </pre>
30039        */
clearIndex()30040       public Builder clearIndex() {
30041         bitField0_ = (bitField0_ & ~0x00000001);
30042         index_ = 0;
30043         onChanged();
30044         return this;
30045       }
30046 
30047       // optional .Result result = 2;
30048       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
30049       private com.google.protobuf.SingleFieldBuilder<
30050           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
30051       /**
30052        * <code>optional .Result result = 2;</code>
30053        */
hasResult()30054       public boolean hasResult() {
30055         return ((bitField0_ & 0x00000002) == 0x00000002);
30056       }
30057       /**
30058        * <code>optional .Result result = 2;</code>
30059        */
getResult()30060       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
30061         if (resultBuilder_ == null) {
30062           return result_;
30063         } else {
30064           return resultBuilder_.getMessage();
30065         }
30066       }
30067       /**
30068        * <code>optional .Result result = 2;</code>
30069        */
setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value)30070       public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
30071         if (resultBuilder_ == null) {
30072           if (value == null) {
30073             throw new NullPointerException();
30074           }
30075           result_ = value;
30076           onChanged();
30077         } else {
30078           resultBuilder_.setMessage(value);
30079         }
30080         bitField0_ |= 0x00000002;
30081         return this;
30082       }
30083       /**
30084        * <code>optional .Result result = 2;</code>
30085        */
setResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue)30086       public Builder setResult(
30087           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
30088         if (resultBuilder_ == null) {
30089           result_ = builderForValue.build();
30090           onChanged();
30091         } else {
30092           resultBuilder_.setMessage(builderForValue.build());
30093         }
30094         bitField0_ |= 0x00000002;
30095         return this;
30096       }
30097       /**
30098        * <code>optional .Result result = 2;</code>
30099        */
mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value)30100       public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
30101         if (resultBuilder_ == null) {
30102           if (((bitField0_ & 0x00000002) == 0x00000002) &&
30103               result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
30104             result_ =
30105               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
30106           } else {
30107             result_ = value;
30108           }
30109           onChanged();
30110         } else {
30111           resultBuilder_.mergeFrom(value);
30112         }
30113         bitField0_ |= 0x00000002;
30114         return this;
30115       }
30116       /**
30117        * <code>optional .Result result = 2;</code>
30118        */
clearResult()30119       public Builder clearResult() {
30120         if (resultBuilder_ == null) {
30121           result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
30122           onChanged();
30123         } else {
30124           resultBuilder_.clear();
30125         }
30126         bitField0_ = (bitField0_ & ~0x00000002);
30127         return this;
30128       }
30129       /**
30130        * <code>optional .Result result = 2;</code>
30131        */
getResultBuilder()30132       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
30133         bitField0_ |= 0x00000002;
30134         onChanged();
30135         return getResultFieldBuilder().getBuilder();
30136       }
30137       /**
30138        * <code>optional .Result result = 2;</code>
30139        */
getResultOrBuilder()30140       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
30141         if (resultBuilder_ != null) {
30142           return resultBuilder_.getMessageOrBuilder();
30143         } else {
30144           return result_;
30145         }
30146       }
30147       /**
30148        * <code>optional .Result result = 2;</code>
30149        */
30150       private com.google.protobuf.SingleFieldBuilder<
30151           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
getResultFieldBuilder()30152           getResultFieldBuilder() {
30153         if (resultBuilder_ == null) {
30154           resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
30155               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
30156                   result_,
30157                   getParentForChildren(),
30158                   isClean());
30159           result_ = null;
30160         }
30161         return resultBuilder_;
30162       }
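      // Illustrative note (not generated by protoc): the result field can be populated
      // either with a finished message via setResult(...) or in place through the nested
      // builder returned by getResultBuilder(); the SingleFieldBuilder above keeps the two
      // views consistent. A sketch, assuming this version of Result defines a stale flag:
      //
      //   ResultOrException.Builder b = ResultOrException.newBuilder();
      //   b.getResultBuilder().setStale(true);  // edits the nested Result in place
      //   ResultOrException roe = b.build();    // roe.hasResult() == true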
30163 
30164       // optional .NameBytesPair exception = 3;
30165       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
30166       private com.google.protobuf.SingleFieldBuilder<
30167           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
30168       /**
30169        * <code>optional .NameBytesPair exception = 3;</code>
30170        */
hasException()30171       public boolean hasException() {
30172         return ((bitField0_ & 0x00000004) == 0x00000004);
30173       }
30174       /**
30175        * <code>optional .NameBytesPair exception = 3;</code>
30176        */
getException()30177       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
30178         if (exceptionBuilder_ == null) {
30179           return exception_;
30180         } else {
30181           return exceptionBuilder_.getMessage();
30182         }
30183       }
30184       /**
30185        * <code>optional .NameBytesPair exception = 3;</code>
30186        */
setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)30187       public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
30188         if (exceptionBuilder_ == null) {
30189           if (value == null) {
30190             throw new NullPointerException();
30191           }
30192           exception_ = value;
30193           onChanged();
30194         } else {
30195           exceptionBuilder_.setMessage(value);
30196         }
30197         bitField0_ |= 0x00000004;
30198         return this;
30199       }
30200       /**
30201        * <code>optional .NameBytesPair exception = 3;</code>
30202        */
setException( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)30203       public Builder setException(
30204           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
30205         if (exceptionBuilder_ == null) {
30206           exception_ = builderForValue.build();
30207           onChanged();
30208         } else {
30209           exceptionBuilder_.setMessage(builderForValue.build());
30210         }
30211         bitField0_ |= 0x00000004;
30212         return this;
30213       }
30214       /**
30215        * <code>optional .NameBytesPair exception = 3;</code>
30216        */
mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)30217       public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
30218         if (exceptionBuilder_ == null) {
30219           if (((bitField0_ & 0x00000004) == 0x00000004) &&
30220               exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
30221             exception_ =
30222               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
30223           } else {
30224             exception_ = value;
30225           }
30226           onChanged();
30227         } else {
30228           exceptionBuilder_.mergeFrom(value);
30229         }
30230         bitField0_ |= 0x00000004;
30231         return this;
30232       }
30233       /**
30234        * <code>optional .NameBytesPair exception = 3;</code>
30235        */
clearException()30236       public Builder clearException() {
30237         if (exceptionBuilder_ == null) {
30238           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
30239           onChanged();
30240         } else {
30241           exceptionBuilder_.clear();
30242         }
30243         bitField0_ = (bitField0_ & ~0x00000004);
30244         return this;
30245       }
30246       /**
30247        * <code>optional .NameBytesPair exception = 3;</code>
30248        */
getExceptionBuilder()30249       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
30250         bitField0_ |= 0x00000004;
30251         onChanged();
30252         return getExceptionFieldBuilder().getBuilder();
30253       }
30254       /**
30255        * <code>optional .NameBytesPair exception = 3;</code>
30256        */
getExceptionOrBuilder()30257       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
30258         if (exceptionBuilder_ != null) {
30259           return exceptionBuilder_.getMessageOrBuilder();
30260         } else {
30261           return exception_;
30262         }
30263       }
30264       /**
30265        * <code>optional .NameBytesPair exception = 3;</code>
30266        */
30267       private com.google.protobuf.SingleFieldBuilder<
30268           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
getExceptionFieldBuilder()30269           getExceptionFieldBuilder() {
30270         if (exceptionBuilder_ == null) {
30271           exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
30272               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
30273                   exception_,
30274                   getParentForChildren(),
30275                   isClean());
30276           exception_ = null;
30277         }
30278         return exceptionBuilder_;
30279       }
30280 
30281       // optional .CoprocessorServiceResult service_result = 4;
30282       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
30283       private com.google.protobuf.SingleFieldBuilder<
30284           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder> serviceResultBuilder_;
30285       /**
30286        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30287        *
30288        * <pre>
30289        * result if this was a coprocessor service call
30290        * </pre>
30291        */
hasServiceResult()30292       public boolean hasServiceResult() {
30293         return ((bitField0_ & 0x00000008) == 0x00000008);
30294       }
30295       /**
30296        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30297        *
30298        * <pre>
30299        * result if this was a coprocessor service call
30300        * </pre>
30301        */
getServiceResult()30302       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult getServiceResult() {
30303         if (serviceResultBuilder_ == null) {
30304           return serviceResult_;
30305         } else {
30306           return serviceResultBuilder_.getMessage();
30307         }
30308       }
30309       /**
30310        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30311        *
30312        * <pre>
30313        * result if this was a coprocessor service call
30314        * </pre>
30315        */
setServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value)30316       public Builder setServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
30317         if (serviceResultBuilder_ == null) {
30318           if (value == null) {
30319             throw new NullPointerException();
30320           }
30321           serviceResult_ = value;
30322           onChanged();
30323         } else {
30324           serviceResultBuilder_.setMessage(value);
30325         }
30326         bitField0_ |= 0x00000008;
30327         return this;
30328       }
30329       /**
30330        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30331        *
30332        * <pre>
30333        * result if this was a coprocessor service call
30334        * </pre>
30335        */
setServiceResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue)30336       public Builder setServiceResult(
30337           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder builderForValue) {
30338         if (serviceResultBuilder_ == null) {
30339           serviceResult_ = builderForValue.build();
30340           onChanged();
30341         } else {
30342           serviceResultBuilder_.setMessage(builderForValue.build());
30343         }
30344         bitField0_ |= 0x00000008;
30345         return this;
30346       }
30347       /**
30348        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30349        *
30350        * <pre>
30351        * result if this was a coprocessor service call
30352        * </pre>
30353        */
mergeServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value)30354       public Builder mergeServiceResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult value) {
30355         if (serviceResultBuilder_ == null) {
30356           if (((bitField0_ & 0x00000008) == 0x00000008) &&
30357               serviceResult_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance()) {
30358             serviceResult_ =
30359               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.newBuilder(serviceResult_).mergeFrom(value).buildPartial();
30360           } else {
30361             serviceResult_ = value;
30362           }
30363           onChanged();
30364         } else {
30365           serviceResultBuilder_.mergeFrom(value);
30366         }
30367         bitField0_ |= 0x00000008;
30368         return this;
30369       }
30370       /**
30371        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30372        *
30373        * <pre>
30374        * result if this was a coprocessor service call
30375        * </pre>
30376        */
clearServiceResult()30377       public Builder clearServiceResult() {
30378         if (serviceResultBuilder_ == null) {
30379           serviceResult_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.getDefaultInstance();
30380           onChanged();
30381         } else {
30382           serviceResultBuilder_.clear();
30383         }
30384         bitField0_ = (bitField0_ & ~0x00000008);
30385         return this;
30386       }
30387       /**
30388        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30389        *
30390        * <pre>
30391        * result if this was a coprocessor service call
30392        * </pre>
30393        */
getServiceResultBuilder()30394       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder getServiceResultBuilder() {
30395         bitField0_ |= 0x00000008;
30396         onChanged();
30397         return getServiceResultFieldBuilder().getBuilder();
30398       }
30399       /**
30400        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30401        *
30402        * <pre>
30403        * result if this was a coprocessor service call
30404        * </pre>
30405        */
getServiceResultOrBuilder()30406       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder getServiceResultOrBuilder() {
30407         if (serviceResultBuilder_ != null) {
30408           return serviceResultBuilder_.getMessageOrBuilder();
30409         } else {
30410           return serviceResult_;
30411         }
30412       }
30413       /**
30414        * <code>optional .CoprocessorServiceResult service_result = 4;</code>
30415        *
30416        * <pre>
30417        * result if this was a coprocessor service call
30418        * </pre>
30419        */
30420       private com.google.protobuf.SingleFieldBuilder<
30421           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>
getServiceResultFieldBuilder()30422           getServiceResultFieldBuilder() {
30423         if (serviceResultBuilder_ == null) {
30424           serviceResultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
30425               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResultOrBuilder>(
30426                   serviceResult_,
30427                   getParentForChildren(),
30428                   isClean());
30429           serviceResult_ = null;
30430         }
30431         return serviceResultBuilder_;
30432       }
30433 
30434       // optional .RegionLoadStats loadStats = 5;
30435       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
30436       private com.google.protobuf.SingleFieldBuilder<
30437           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder> loadStatsBuilder_;
30438       /**
30439        * <code>optional .RegionLoadStats loadStats = 5;</code>
30440        *
30441        * <pre>
30442        * current load on the region
30443        * </pre>
30444        */
hasLoadStats()30445       public boolean hasLoadStats() {
30446         return ((bitField0_ & 0x00000010) == 0x00000010);
30447       }
30448       /**
30449        * <code>optional .RegionLoadStats loadStats = 5;</code>
30450        *
30451        * <pre>
30452        * current load on the region
30453        * </pre>
30454        */
getLoadStats()30455       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats getLoadStats() {
30456         if (loadStatsBuilder_ == null) {
30457           return loadStats_;
30458         } else {
30459           return loadStatsBuilder_.getMessage();
30460         }
30461       }
30462       /**
30463        * <code>optional .RegionLoadStats loadStats = 5;</code>
30464        *
30465        * <pre>
30466        * current load on the region
30467        * </pre>
30468        */
setLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value)30469       public Builder setLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
30470         if (loadStatsBuilder_ == null) {
30471           if (value == null) {
30472             throw new NullPointerException();
30473           }
30474           loadStats_ = value;
30475           onChanged();
30476         } else {
30477           loadStatsBuilder_.setMessage(value);
30478         }
30479         bitField0_ |= 0x00000010;
30480         return this;
30481       }
30482       /**
30483        * <code>optional .RegionLoadStats loadStats = 5;</code>
30484        *
30485        * <pre>
30486        * current load on the region
30487        * </pre>
30488        */
setLoadStats( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue)30489       public Builder setLoadStats(
30490           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder builderForValue) {
30491         if (loadStatsBuilder_ == null) {
30492           loadStats_ = builderForValue.build();
30493           onChanged();
30494         } else {
30495           loadStatsBuilder_.setMessage(builderForValue.build());
30496         }
30497         bitField0_ |= 0x00000010;
30498         return this;
30499       }
30500       /**
30501        * <code>optional .RegionLoadStats loadStats = 5;</code>
30502        *
30503        * <pre>
30504        * current load on the region
30505        * </pre>
30506        */
mergeLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value)30507       public Builder mergeLoadStats(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats value) {
30508         if (loadStatsBuilder_ == null) {
30509           if (((bitField0_ & 0x00000010) == 0x00000010) &&
30510               loadStats_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance()) {
30511             loadStats_ =
30512               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.newBuilder(loadStats_).mergeFrom(value).buildPartial();
30513           } else {
30514             loadStats_ = value;
30515           }
30516           onChanged();
30517         } else {
30518           loadStatsBuilder_.mergeFrom(value);
30519         }
30520         bitField0_ |= 0x00000010;
30521         return this;
30522       }
30523       /**
30524        * <code>optional .RegionLoadStats loadStats = 5;</code>
30525        *
30526        * <pre>
30527        * current load on the region
30528        * </pre>
30529        */
clearLoadStats()30530       public Builder clearLoadStats() {
30531         if (loadStatsBuilder_ == null) {
30532           loadStats_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.getDefaultInstance();
30533           onChanged();
30534         } else {
30535           loadStatsBuilder_.clear();
30536         }
30537         bitField0_ = (bitField0_ & ~0x00000010);
30538         return this;
30539       }
30540       /**
30541        * <code>optional .RegionLoadStats loadStats = 5;</code>
30542        *
30543        * <pre>
30544        * current load on the region
30545        * </pre>
30546        */
getLoadStatsBuilder()30547       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder getLoadStatsBuilder() {
30548         bitField0_ |= 0x00000010;
30549         onChanged();
30550         return getLoadStatsFieldBuilder().getBuilder();
30551       }
30552       /**
30553        * <code>optional .RegionLoadStats loadStats = 5;</code>
30554        *
30555        * <pre>
30556        * current load on the region
30557        * </pre>
30558        */
getLoadStatsOrBuilder()30559       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder getLoadStatsOrBuilder() {
30560         if (loadStatsBuilder_ != null) {
30561           return loadStatsBuilder_.getMessageOrBuilder();
30562         } else {
30563           return loadStats_;
30564         }
30565       }
30566       /**
30567        * <code>optional .RegionLoadStats loadStats = 5;</code>
30568        *
30569        * <pre>
30570        * current load on the region
30571        * </pre>
30572        */
30573       private com.google.protobuf.SingleFieldBuilder<
30574           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>
getLoadStatsFieldBuilder()30575           getLoadStatsFieldBuilder() {
30576         if (loadStatsBuilder_ == null) {
30577           loadStatsBuilder_ = new com.google.protobuf.SingleFieldBuilder<
30578               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStats.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionLoadStatsOrBuilder>(
30579                   loadStats_,
30580                   getParentForChildren(),
30581                   isClean());
30582           loadStats_ = null;
30583         }
30584         return loadStatsBuilder_;
30585       }
30586 
30587       // @@protoc_insertion_point(builder_scope:ResultOrException)
30588     }
30589 
30590     static {
30591       defaultInstance = new ResultOrException(true);
defaultInstance.initFields()30592       defaultInstance.initFields();
30593     }
30594 
30595     // @@protoc_insertion_point(class_scope:ResultOrException)
30596   }
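  // Illustrative note (not generated by protoc): a ResultOrException produced for one
  // action of a multi request carries either a Result or an exception NameBytesPair,
  // plus the index used to line the response up with the original request. A minimal
  // construction sketch (the exception class name is just an example string):
  //
  //   ClientProtos.ResultOrException roe = ClientProtos.ResultOrException.newBuilder()
  //       .setIndex(2)
  //       .setException(HBaseProtos.NameBytesPair.newBuilder()
  //           .setName("org.apache.hadoop.hbase.NotServingRegionException")
  //           .build())
  //       .build();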
30597 
30598   public interface RegionActionResultOrBuilder
30599       extends com.google.protobuf.MessageOrBuilder {
30600 
30601     // repeated .ResultOrException resultOrException = 1;
30602     /**
30603      * <code>repeated .ResultOrException resultOrException = 1;</code>
30604      */
30605     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>
getResultOrExceptionList()30606         getResultOrExceptionList();
30607     /**
30608      * <code>repeated .ResultOrException resultOrException = 1;</code>
30609      */
getResultOrException(int index)30610     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index);
30611     /**
30612      * <code>repeated .ResultOrException resultOrException = 1;</code>
30613      */
getResultOrExceptionCount()30614     int getResultOrExceptionCount();
30615     /**
30616      * <code>repeated .ResultOrException resultOrException = 1;</code>
30617      */
30618     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
getResultOrExceptionOrBuilderList()30619         getResultOrExceptionOrBuilderList();
30620     /**
30621      * <code>repeated .ResultOrException resultOrException = 1;</code>
30622      */
getResultOrExceptionOrBuilder( int index)30623     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
30624         int index);
30625 
30626     // optional .NameBytesPair exception = 2;
30627     /**
30628      * <code>optional .NameBytesPair exception = 2;</code>
30629      *
30630      * <pre>
30631      * If the operation failed globally for this region, this exception is set
30632      * </pre>
30633      */
hasException()30634     boolean hasException();
30635     /**
30636      * <code>optional .NameBytesPair exception = 2;</code>
30637      *
30638      * <pre>
30639      * If the operation failed globally for this region, this exception is set
30640      * </pre>
30641      */
getException()30642     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException();
30643     /**
30644      * <code>optional .NameBytesPair exception = 2;</code>
30645      *
30646      * <pre>
30647      * If the operation failed globally for this region, this exception is set
30648      * </pre>
30649      */
getExceptionOrBuilder()30650     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder();
30651   }
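  // Illustrative note (not generated by protoc): RegionActionResult (below) carries either
  // one ResultOrException per action or a single region-wide exception. A consumer sketch,
  // where "rar" is a parsed RegionActionResult and handle(...) is a hypothetical callback:
  //
  //   if (rar.hasException()) {
  //     handle(rar.getException());            // the whole RegionAction failed
  //   } else {
  //     for (ClientProtos.ResultOrException roe : rar.getResultOrExceptionList()) {
  //       // align with the original request via roe.getIndex()
  //     }
  //   }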
30652   /**
30653    * Protobuf type {@code RegionActionResult}
30654    *
30655    * <pre>
30656    **
30657    * The result of a RegionAction.
30658    * </pre>
30659    */
30660   public static final class RegionActionResult extends
30661       com.google.protobuf.GeneratedMessage
30662       implements RegionActionResultOrBuilder {
30663     // Use RegionActionResult.newBuilder() to construct.
RegionActionResult(com.google.protobuf.GeneratedMessage.Builder<?> builder)30664     private RegionActionResult(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
30665       super(builder);
30666       this.unknownFields = builder.getUnknownFields();
30667     }
RegionActionResult(boolean noInit)30668     private RegionActionResult(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
30669 
30670     private static final RegionActionResult defaultInstance;
getDefaultInstance()30671     public static RegionActionResult getDefaultInstance() {
30672       return defaultInstance;
30673     }
30674 
getDefaultInstanceForType()30675     public RegionActionResult getDefaultInstanceForType() {
30676       return defaultInstance;
30677     }
30678 
30679     private final com.google.protobuf.UnknownFieldSet unknownFields;
30680     @java.lang.Override
30681     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()30682         getUnknownFields() {
30683       return this.unknownFields;
30684     }
RegionActionResult( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)30685     private RegionActionResult(
30686         com.google.protobuf.CodedInputStream input,
30687         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30688         throws com.google.protobuf.InvalidProtocolBufferException {
30689       initFields();
30690       int mutable_bitField0_ = 0;
30691       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
30692           com.google.protobuf.UnknownFieldSet.newBuilder();
30693       try {
30694         boolean done = false;
30695         while (!done) {
30696           int tag = input.readTag();
30697           switch (tag) {
30698             case 0:
30699               done = true;
30700               break;
30701             default: {
30702               if (!parseUnknownField(input, unknownFields,
30703                                      extensionRegistry, tag)) {
30704                 done = true;
30705               }
30706               break;
30707             }
30708             case 10: {
30709               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
30710                 resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>();
30711                 mutable_bitField0_ |= 0x00000001;
30712               }
30713               resultOrException_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.PARSER, extensionRegistry));
30714               break;
30715             }
30716             case 18: {
30717               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = null;
30718               if (((bitField0_ & 0x00000001) == 0x00000001)) {
30719                 subBuilder = exception_.toBuilder();
30720               }
30721               exception_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry);
30722               if (subBuilder != null) {
30723                 subBuilder.mergeFrom(exception_);
30724                 exception_ = subBuilder.buildPartial();
30725               }
30726               bitField0_ |= 0x00000001;
30727               break;
30728             }
30729           }
30730         }
30731       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
30732         throw e.setUnfinishedMessage(this);
30733       } catch (java.io.IOException e) {
30734         throw new com.google.protobuf.InvalidProtocolBufferException(
30735             e.getMessage()).setUnfinishedMessage(this);
30736       } finally {
30737         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
30738           resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
30739         }
30740         this.unknownFields = unknownFields.build();
30741         makeExtensionsImmutable();
30742       }
30743     }
30744     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()30745         getDescriptor() {
30746       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_descriptor;
30747     }
30748 
30749     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()30750         internalGetFieldAccessorTable() {
30751       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_fieldAccessorTable
30752           .ensureFieldAccessorsInitialized(
30753               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
30754     }
30755 
30756     public static com.google.protobuf.Parser<RegionActionResult> PARSER =
30757         new com.google.protobuf.AbstractParser<RegionActionResult>() {
30758       public RegionActionResult parsePartialFrom(
30759           com.google.protobuf.CodedInputStream input,
30760           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30761           throws com.google.protobuf.InvalidProtocolBufferException {
30762         return new RegionActionResult(input, extensionRegistry);
30763       }
30764     };
30765 
30766     @java.lang.Override
getParserForType()30767     public com.google.protobuf.Parser<RegionActionResult> getParserForType() {
30768       return PARSER;
30769     }
30770 
30771     private int bitField0_;
30772     // repeated .ResultOrException resultOrException = 1;
30773     public static final int RESULTOREXCEPTION_FIELD_NUMBER = 1;
30774     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_;
30775     /**
30776      * <code>repeated .ResultOrException resultOrException = 1;</code>
30777      */
getResultOrExceptionList()30778     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
30779       return resultOrException_;
30780     }
30781     /**
30782      * <code>repeated .ResultOrException resultOrException = 1;</code>
30783      */
30784     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
getResultOrExceptionOrBuilderList()30785         getResultOrExceptionOrBuilderList() {
30786       return resultOrException_;
30787     }
30788     /**
30789      * <code>repeated .ResultOrException resultOrException = 1;</code>
30790      */
30791     public int getResultOrExceptionCount() {
30792       return resultOrException_.size();
30793     }
30794     /**
30795      * <code>repeated .ResultOrException resultOrException = 1;</code>
30796      */
30797     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
30798       return resultOrException_.get(index);
30799     }
30800     /**
30801      * <code>repeated .ResultOrException resultOrException = 1;</code>
30802      */
30803     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
30804         int index) {
30805       return resultOrException_.get(index);
30806     }
30807 
30808     // optional .NameBytesPair exception = 2;
30809     public static final int EXCEPTION_FIELD_NUMBER = 2;
30810     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_;
30811     /**
30812      * <code>optional .NameBytesPair exception = 2;</code>
30813      *
30814      * <pre>
30815      * If the operation failed globally for this region, this exception is set
30816      * </pre>
30817      */
30818     public boolean hasException() {
30819       return ((bitField0_ & 0x00000001) == 0x00000001);
30820     }
30821     /**
30822      * <code>optional .NameBytesPair exception = 2;</code>
30823      *
30824      * <pre>
30825      * If the operation failed globally for this region, this exception is set
30826      * </pre>
30827      */
30828     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
30829       return exception_;
30830     }
30831     /**
30832      * <code>optional .NameBytesPair exception = 2;</code>
30833      *
30834      * <pre>
30835      * If the operation failed globally for this region, this exception is set
30836      * </pre>
30837      */
30838     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
30839       return exception_;
30840     }
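    /*
     * Editor's note (illustrative sketch, not emitted by protoc): given the
     * accessors above, a caller might distinguish a region-wide failure from
     * per-action outcomes roughly like this; the variable names are hypothetical.
     *
     *   ClientProtos.RegionActionResult rar = ...;
     *   if (rar.hasException()) {
     *     // the whole RegionAction failed; getException() carries the cause
     *     HBaseProtos.NameBytesPair cause = rar.getException();
     *   } else {
     *     for (ClientProtos.ResultOrException roe : rar.getResultOrExceptionList()) {
     *       // one entry per action outcome within the RegionAction
     *     }
     *   }
     */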
30841 
30842     private void initFields() {
30843       resultOrException_ = java.util.Collections.emptyList();
30844       exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
30845     }
30846     private byte memoizedIsInitialized = -1;
30847     public final boolean isInitialized() {
30848       byte isInitialized = memoizedIsInitialized;
30849       if (isInitialized != -1) return isInitialized == 1;
30850 
30851       for (int i = 0; i < getResultOrExceptionCount(); i++) {
30852         if (!getResultOrException(i).isInitialized()) {
30853           memoizedIsInitialized = 0;
30854           return false;
30855         }
30856       }
30857       if (hasException()) {
30858         if (!getException().isInitialized()) {
30859           memoizedIsInitialized = 0;
30860           return false;
30861         }
30862       }
30863       memoizedIsInitialized = 1;
30864       return true;
30865     }
30866 
30867     public void writeTo(com.google.protobuf.CodedOutputStream output)
30868                         throws java.io.IOException {
30869       getSerializedSize();
30870       for (int i = 0; i < resultOrException_.size(); i++) {
30871         output.writeMessage(1, resultOrException_.get(i));
30872       }
30873       if (((bitField0_ & 0x00000001) == 0x00000001)) {
30874         output.writeMessage(2, exception_);
30875       }
30876       getUnknownFields().writeTo(output);
30877     }
30878 
30879     private int memoizedSerializedSize = -1;
30880     public int getSerializedSize() {
30881       int size = memoizedSerializedSize;
30882       if (size != -1) return size;
30883 
30884       size = 0;
30885       for (int i = 0; i < resultOrException_.size(); i++) {
30886         size += com.google.protobuf.CodedOutputStream
30887           .computeMessageSize(1, resultOrException_.get(i));
30888       }
30889       if (((bitField0_ & 0x00000001) == 0x00000001)) {
30890         size += com.google.protobuf.CodedOutputStream
30891           .computeMessageSize(2, exception_);
30892       }
30893       size += getUnknownFields().getSerializedSize();
30894       memoizedSerializedSize = size;
30895       return size;
30896     }
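    /*
     * Editor's note (illustrative sketch, not emitted by protoc): writeTo and
     * PARSER/parseFrom are inverses over the wire format, so a round trip like
     * the following is expected to preserve the message; toByteArray() is the
     * standard GeneratedMessage helper that wraps writeTo.
     *
     *   byte[] wire = msg.toByteArray();
     *   ClientProtos.RegionActionResult copy =
     *       ClientProtos.RegionActionResult.parseFrom(wire);
     */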
30897 
30898     private static final long serialVersionUID = 0L;
30899     @java.lang.Override
30900     protected java.lang.Object writeReplace()
30901         throws java.io.ObjectStreamException {
30902       return super.writeReplace();
30903     }
30904 
30905     @java.lang.Override
30906     public boolean equals(final java.lang.Object obj) {
30907       if (obj == this) {
30908        return true;
30909       }
30910       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)) {
30911         return super.equals(obj);
30912       }
30913       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) obj;
30914 
30915       boolean result = true;
30916       result = result && getResultOrExceptionList()
30917           .equals(other.getResultOrExceptionList());
30918       result = result && (hasException() == other.hasException());
30919       if (hasException()) {
30920         result = result && getException()
30921             .equals(other.getException());
30922       }
30923       result = result &&
30924           getUnknownFields().equals(other.getUnknownFields());
30925       return result;
30926     }
30927 
30928     private int memoizedHashCode = 0;
30929     @java.lang.Override
30930     public int hashCode() {
30931       if (memoizedHashCode != 0) {
30932         return memoizedHashCode;
30933       }
30934       int hash = 41;
30935       hash = (19 * hash) + getDescriptorForType().hashCode();
30936       if (getResultOrExceptionCount() > 0) {
30937         hash = (37 * hash) + RESULTOREXCEPTION_FIELD_NUMBER;
30938         hash = (53 * hash) + getResultOrExceptionList().hashCode();
30939       }
30940       if (hasException()) {
30941         hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
30942         hash = (53 * hash) + getException().hashCode();
30943       }
30944       hash = (29 * hash) + getUnknownFields().hashCode();
30945       memoizedHashCode = hash;
30946       return hash;
30947     }
30948 
30949     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
30950         com.google.protobuf.ByteString data)
30951         throws com.google.protobuf.InvalidProtocolBufferException {
30952       return PARSER.parseFrom(data);
30953     }
30954     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
30955         com.google.protobuf.ByteString data,
30956         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30957         throws com.google.protobuf.InvalidProtocolBufferException {
30958       return PARSER.parseFrom(data, extensionRegistry);
30959     }
30960     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(byte[] data)
30961         throws com.google.protobuf.InvalidProtocolBufferException {
30962       return PARSER.parseFrom(data);
30963     }
30964     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
30965         byte[] data,
30966         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30967         throws com.google.protobuf.InvalidProtocolBufferException {
30968       return PARSER.parseFrom(data, extensionRegistry);
30969     }
30970     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(java.io.InputStream input)
30971         throws java.io.IOException {
30972       return PARSER.parseFrom(input);
30973     }
30974     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
30975         java.io.InputStream input,
30976         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30977         throws java.io.IOException {
30978       return PARSER.parseFrom(input, extensionRegistry);
30979     }
30980     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(java.io.InputStream input)
30981         throws java.io.IOException {
30982       return PARSER.parseDelimitedFrom(input);
30983     }
30984     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseDelimitedFrom(
30985         java.io.InputStream input,
30986         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30987         throws java.io.IOException {
30988       return PARSER.parseDelimitedFrom(input, extensionRegistry);
30989     }
30990     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
30991         com.google.protobuf.CodedInputStream input)
30992         throws java.io.IOException {
30993       return PARSER.parseFrom(input);
30994     }
30995     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parseFrom(
30996         com.google.protobuf.CodedInputStream input,
30997         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
30998         throws java.io.IOException {
30999       return PARSER.parseFrom(input, extensionRegistry);
31000     }
31001 
31002     public static Builder newBuilder() { return Builder.create(); }
31003     public Builder newBuilderForType() { return newBuilder(); }
31004     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult prototype) {
31005       return newBuilder().mergeFrom(prototype);
31006     }
31007     public Builder toBuilder() { return newBuilder(this); }
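    /*
     * Editor's note (illustrative sketch, not emitted by protoc): the usual way
     * to construct or modify this message is through the Builder defined below;
     * the value variables here are hypothetical.
     *
     *   ClientProtos.RegionActionResult rar = ClientProtos.RegionActionResult.newBuilder()
     *       .addResultOrException(someResultOrException)
     *       .build();
     *   ClientProtos.RegionActionResult withError =
     *       rar.toBuilder().setException(someNameBytesPair).build();
     */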
31008 
31009     @java.lang.Override
31010     protected Builder newBuilderForType(
31011         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
31012       Builder builder = new Builder(parent);
31013       return builder;
31014     }
31015     /**
31016      * Protobuf type {@code RegionActionResult}
31017      *
31018      * <pre>
31019      **
31020      * The result of a RegionAction.
31021      * </pre>
31022      */
31023     public static final class Builder extends
31024         com.google.protobuf.GeneratedMessage.Builder<Builder>
31025        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder {
31026       public static final com.google.protobuf.Descriptors.Descriptor
31027           getDescriptor() {
31028         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_descriptor;
31029       }
31030 
31031       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
31032           internalGetFieldAccessorTable() {
31033         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_fieldAccessorTable
31034             .ensureFieldAccessorsInitialized(
31035                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder.class);
31036       }
31037 
31038       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.newBuilder()
31039       private Builder() {
31040         maybeForceBuilderInitialization();
31041       }
31042 
31043       private Builder(
31044           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
31045         super(parent);
31046         maybeForceBuilderInitialization();
31047       }
31048       private void maybeForceBuilderInitialization() {
31049         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
31050           getResultOrExceptionFieldBuilder();
31051           getExceptionFieldBuilder();
31052         }
31053       }
31054       private static Builder create() {
31055         return new Builder();
31056       }
31057 
31058       public Builder clear() {
31059         super.clear();
31060         if (resultOrExceptionBuilder_ == null) {
31061           resultOrException_ = java.util.Collections.emptyList();
31062           bitField0_ = (bitField0_ & ~0x00000001);
31063         } else {
31064           resultOrExceptionBuilder_.clear();
31065         }
31066         if (exceptionBuilder_ == null) {
31067           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
31068         } else {
31069           exceptionBuilder_.clear();
31070         }
31071         bitField0_ = (bitField0_ & ~0x00000002);
31072         return this;
31073       }
31074 
31075       public Builder clone() {
31076         return create().mergeFrom(buildPartial());
31077       }
31078 
31079       public com.google.protobuf.Descriptors.Descriptor
31080           getDescriptorForType() {
31081         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_RegionActionResult_descriptor;
31082       }
31083 
31084       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getDefaultInstanceForType() {
31085         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance();
31086       }
31087 
31088       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult build() {
31089         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = buildPartial();
31090         if (!result.isInitialized()) {
31091           throw newUninitializedMessageException(result);
31092         }
31093         return result;
31094       }
31095 
31096       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult buildPartial() {
31097         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult(this);
31098         int from_bitField0_ = bitField0_;
31099         int to_bitField0_ = 0;
31100         if (resultOrExceptionBuilder_ == null) {
31101           if (((bitField0_ & 0x00000001) == 0x00000001)) {
31102             resultOrException_ = java.util.Collections.unmodifiableList(resultOrException_);
31103             bitField0_ = (bitField0_ & ~0x00000001);
31104           }
31105           result.resultOrException_ = resultOrException_;
31106         } else {
31107           result.resultOrException_ = resultOrExceptionBuilder_.build();
31108         }
31109         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
31110           to_bitField0_ |= 0x00000001;
31111         }
31112         if (exceptionBuilder_ == null) {
31113           result.exception_ = exception_;
31114         } else {
31115           result.exception_ = exceptionBuilder_.build();
31116         }
31117         result.bitField0_ = to_bitField0_;
31118         onBuilt();
31119         return result;
31120       }
31121 
31122       public Builder mergeFrom(com.google.protobuf.Message other) {
31123         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) {
31124           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult)other);
31125         } else {
31126           super.mergeFrom(other);
31127           return this;
31128         }
31129       }
31130 
31131       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult other) {
31132         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance()) return this;
31133         if (resultOrExceptionBuilder_ == null) {
31134           if (!other.resultOrException_.isEmpty()) {
31135             if (resultOrException_.isEmpty()) {
31136               resultOrException_ = other.resultOrException_;
31137               bitField0_ = (bitField0_ & ~0x00000001);
31138             } else {
31139               ensureResultOrExceptionIsMutable();
31140               resultOrException_.addAll(other.resultOrException_);
31141             }
31142             onChanged();
31143           }
31144         } else {
31145           if (!other.resultOrException_.isEmpty()) {
31146             if (resultOrExceptionBuilder_.isEmpty()) {
31147               resultOrExceptionBuilder_.dispose();
31148               resultOrExceptionBuilder_ = null;
31149               resultOrException_ = other.resultOrException_;
31150               bitField0_ = (bitField0_ & ~0x00000001);
31151               resultOrExceptionBuilder_ =
31152                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
31153                    getResultOrExceptionFieldBuilder() : null;
31154             } else {
31155               resultOrExceptionBuilder_.addAllMessages(other.resultOrException_);
31156             }
31157           }
31158         }
31159         if (other.hasException()) {
31160           mergeException(other.getException());
31161         }
31162         this.mergeUnknownFields(other.getUnknownFields());
31163         return this;
31164       }
31165 
31166       public final boolean isInitialized() {
31167         for (int i = 0; i < getResultOrExceptionCount(); i++) {
31168           if (!getResultOrException(i).isInitialized()) {
31169 
31170             return false;
31171           }
31172         }
31173         if (hasException()) {
31174           if (!getException().isInitialized()) {
31175 
31176             return false;
31177           }
31178         }
31179         return true;
31180       }
31181 
31182       public Builder mergeFrom(
31183           com.google.protobuf.CodedInputStream input,
31184           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31185           throws java.io.IOException {
31186         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult parsedMessage = null;
31187         try {
31188           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
31189         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
31190           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult) e.getUnfinishedMessage();
31191           throw e;
31192         } finally {
31193           if (parsedMessage != null) {
31194             mergeFrom(parsedMessage);
31195           }
31196         }
31197         return this;
31198       }
31199       private int bitField0_;
31200 
31201       // repeated .ResultOrException resultOrException = 1;
31202       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> resultOrException_ =
31203         java.util.Collections.emptyList();
31204       private void ensureResultOrExceptionIsMutable() {
31205         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
31206           resultOrException_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException>(resultOrException_);
31207           bitField0_ |= 0x00000001;
31208          }
31209       }
31210 
31211       private com.google.protobuf.RepeatedFieldBuilder<
31212           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder> resultOrExceptionBuilder_;
31213 
31214       /**
31215        * <code>repeated .ResultOrException resultOrException = 1;</code>
31216        */
31217       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> getResultOrExceptionList() {
31218         if (resultOrExceptionBuilder_ == null) {
31219           return java.util.Collections.unmodifiableList(resultOrException_);
31220         } else {
31221           return resultOrExceptionBuilder_.getMessageList();
31222         }
31223       }
31224       /**
31225        * <code>repeated .ResultOrException resultOrException = 1;</code>
31226        */
31227       public int getResultOrExceptionCount() {
31228         if (resultOrExceptionBuilder_ == null) {
31229           return resultOrException_.size();
31230         } else {
31231           return resultOrExceptionBuilder_.getCount();
31232         }
31233       }
31234       /**
31235        * <code>repeated .ResultOrException resultOrException = 1;</code>
31236        */
31237       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException getResultOrException(int index) {
31238         if (resultOrExceptionBuilder_ == null) {
31239           return resultOrException_.get(index);
31240         } else {
31241           return resultOrExceptionBuilder_.getMessage(index);
31242         }
31243       }
31244       /**
31245        * <code>repeated .ResultOrException resultOrException = 1;</code>
31246        */
31247       public Builder setResultOrException(
31248           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
31249         if (resultOrExceptionBuilder_ == null) {
31250           if (value == null) {
31251             throw new NullPointerException();
31252           }
31253           ensureResultOrExceptionIsMutable();
31254           resultOrException_.set(index, value);
31255           onChanged();
31256         } else {
31257           resultOrExceptionBuilder_.setMessage(index, value);
31258         }
31259         return this;
31260       }
31261       /**
31262        * <code>repeated .ResultOrException resultOrException = 1;</code>
31263        */
31264       public Builder setResultOrException(
31265           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
31266         if (resultOrExceptionBuilder_ == null) {
31267           ensureResultOrExceptionIsMutable();
31268           resultOrException_.set(index, builderForValue.build());
31269           onChanged();
31270         } else {
31271           resultOrExceptionBuilder_.setMessage(index, builderForValue.build());
31272         }
31273         return this;
31274       }
31275       /**
31276        * <code>repeated .ResultOrException resultOrException = 1;</code>
31277        */
31278       public Builder addResultOrException(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
31279         if (resultOrExceptionBuilder_ == null) {
31280           if (value == null) {
31281             throw new NullPointerException();
31282           }
31283           ensureResultOrExceptionIsMutable();
31284           resultOrException_.add(value);
31285           onChanged();
31286         } else {
31287           resultOrExceptionBuilder_.addMessage(value);
31288         }
31289         return this;
31290       }
31291       /**
31292        * <code>repeated .ResultOrException resultOrException = 1;</code>
31293        */
31294       public Builder addResultOrException(
31295           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException value) {
31296         if (resultOrExceptionBuilder_ == null) {
31297           if (value == null) {
31298             throw new NullPointerException();
31299           }
31300           ensureResultOrExceptionIsMutable();
31301           resultOrException_.add(index, value);
31302           onChanged();
31303         } else {
31304           resultOrExceptionBuilder_.addMessage(index, value);
31305         }
31306         return this;
31307       }
31308       /**
31309        * <code>repeated .ResultOrException resultOrException = 1;</code>
31310        */
31311       public Builder addResultOrException(
31312           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
31313         if (resultOrExceptionBuilder_ == null) {
31314           ensureResultOrExceptionIsMutable();
31315           resultOrException_.add(builderForValue.build());
31316           onChanged();
31317         } else {
31318           resultOrExceptionBuilder_.addMessage(builderForValue.build());
31319         }
31320         return this;
31321       }
31322       /**
31323        * <code>repeated .ResultOrException resultOrException = 1;</code>
31324        */
31325       public Builder addResultOrException(
31326           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder builderForValue) {
31327         if (resultOrExceptionBuilder_ == null) {
31328           ensureResultOrExceptionIsMutable();
31329           resultOrException_.add(index, builderForValue.build());
31330           onChanged();
31331         } else {
31332           resultOrExceptionBuilder_.addMessage(index, builderForValue.build());
31333         }
31334         return this;
31335       }
31336       /**
31337        * <code>repeated .ResultOrException resultOrException = 1;</code>
31338        */
31339       public Builder addAllResultOrException(
31340           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException> values) {
31341         if (resultOrExceptionBuilder_ == null) {
31342           ensureResultOrExceptionIsMutable();
31343           super.addAll(values, resultOrException_);
31344           onChanged();
31345         } else {
31346           resultOrExceptionBuilder_.addAllMessages(values);
31347         }
31348         return this;
31349       }
31350       /**
31351        * <code>repeated .ResultOrException resultOrException = 1;</code>
31352        */
31353       public Builder clearResultOrException() {
31354         if (resultOrExceptionBuilder_ == null) {
31355           resultOrException_ = java.util.Collections.emptyList();
31356           bitField0_ = (bitField0_ & ~0x00000001);
31357           onChanged();
31358         } else {
31359           resultOrExceptionBuilder_.clear();
31360         }
31361         return this;
31362       }
31363       /**
31364        * <code>repeated .ResultOrException resultOrException = 1;</code>
31365        */
31366       public Builder removeResultOrException(int index) {
31367         if (resultOrExceptionBuilder_ == null) {
31368           ensureResultOrExceptionIsMutable();
31369           resultOrException_.remove(index);
31370           onChanged();
31371         } else {
31372           resultOrExceptionBuilder_.remove(index);
31373         }
31374         return this;
31375       }
31376       /**
31377        * <code>repeated .ResultOrException resultOrException = 1;</code>
31378        */
31379       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder getResultOrExceptionBuilder(
31380           int index) {
31381         return getResultOrExceptionFieldBuilder().getBuilder(index);
31382       }
31383       /**
31384        * <code>repeated .ResultOrException resultOrException = 1;</code>
31385        */
31386       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder getResultOrExceptionOrBuilder(
31387           int index) {
31388         if (resultOrExceptionBuilder_ == null) {
31389           return resultOrException_.get(index);  } else {
31390           return resultOrExceptionBuilder_.getMessageOrBuilder(index);
31391         }
31392       }
31393       /**
31394        * <code>repeated .ResultOrException resultOrException = 1;</code>
31395        */
31396       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
31397            getResultOrExceptionOrBuilderList() {
31398         if (resultOrExceptionBuilder_ != null) {
31399           return resultOrExceptionBuilder_.getMessageOrBuilderList();
31400         } else {
31401           return java.util.Collections.unmodifiableList(resultOrException_);
31402         }
31403       }
31404       /**
31405        * <code>repeated .ResultOrException resultOrException = 1;</code>
31406        */
31407       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder() {
31408         return getResultOrExceptionFieldBuilder().addBuilder(
31409             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
31410       }
31411       /**
31412        * <code>repeated .ResultOrException resultOrException = 1;</code>
31413        */
31414       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder addResultOrExceptionBuilder(
31415           int index) {
31416         return getResultOrExceptionFieldBuilder().addBuilder(
31417             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.getDefaultInstance());
31418       }
31419       /**
31420        * <code>repeated .ResultOrException resultOrException = 1;</code>
31421        */
31422       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder>
31423            getResultOrExceptionBuilderList() {
31424         return getResultOrExceptionFieldBuilder().getBuilderList();
31425       }
31426       private com.google.protobuf.RepeatedFieldBuilder<
31427           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>
31428           getResultOrExceptionFieldBuilder() {
31429         if (resultOrExceptionBuilder_ == null) {
31430           resultOrExceptionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
31431               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrException.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrExceptionOrBuilder>(
31432                   resultOrException_,
31433                   ((bitField0_ & 0x00000001) == 0x00000001),
31434                   getParentForChildren(),
31435                   isClean());
31436           resultOrException_ = null;
31437         }
31438         return resultOrExceptionBuilder_;
31439       }
31440 
31441       // optional .NameBytesPair exception = 2;
31442       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
31443       private com.google.protobuf.SingleFieldBuilder<
31444           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> exceptionBuilder_;
31445       /**
31446        * <code>optional .NameBytesPair exception = 2;</code>
31447        *
31448        * <pre>
31449        * If the operation failed globally for this region, this exception is set
31450        * </pre>
31451        */
31452       public boolean hasException() {
31453         return ((bitField0_ & 0x00000002) == 0x00000002);
31454       }
31455       /**
31456        * <code>optional .NameBytesPair exception = 2;</code>
31457        *
31458        * <pre>
31459        * If the operation failed globally for this region, this exception is set
31460        * </pre>
31461        */
31462       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getException() {
31463         if (exceptionBuilder_ == null) {
31464           return exception_;
31465         } else {
31466           return exceptionBuilder_.getMessage();
31467         }
31468       }
31469       /**
31470        * <code>optional .NameBytesPair exception = 2;</code>
31471        *
31472        * <pre>
31473        * If the operation failed globally for this region, this exception is set
31474        * </pre>
31475        */
31476       public Builder setException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
31477         if (exceptionBuilder_ == null) {
31478           if (value == null) {
31479             throw new NullPointerException();
31480           }
31481           exception_ = value;
31482           onChanged();
31483         } else {
31484           exceptionBuilder_.setMessage(value);
31485         }
31486         bitField0_ |= 0x00000002;
31487         return this;
31488       }
31489       /**
31490        * <code>optional .NameBytesPair exception = 2;</code>
31491        *
31492        * <pre>
31493        * If the operation failed globally for this region, this exception is set
31494        * </pre>
31495        */
31496       public Builder setException(
31497           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
31498         if (exceptionBuilder_ == null) {
31499           exception_ = builderForValue.build();
31500           onChanged();
31501         } else {
31502           exceptionBuilder_.setMessage(builderForValue.build());
31503         }
31504         bitField0_ |= 0x00000002;
31505         return this;
31506       }
31507       /**
31508        * <code>optional .NameBytesPair exception = 2;</code>
31509        *
31510        * <pre>
31511        * If the operation failed globally for this region, this exception is set
31512        * </pre>
31513        */
31514       public Builder mergeException(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
31515         if (exceptionBuilder_ == null) {
31516           if (((bitField0_ & 0x00000002) == 0x00000002) &&
31517               exception_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
31518             exception_ =
31519               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(exception_).mergeFrom(value).buildPartial();
31520           } else {
31521             exception_ = value;
31522           }
31523           onChanged();
31524         } else {
31525           exceptionBuilder_.mergeFrom(value);
31526         }
31527         bitField0_ |= 0x00000002;
31528         return this;
31529       }
31530       /**
31531        * <code>optional .NameBytesPair exception = 2;</code>
31532        *
31533        * <pre>
31534        * If the operation failed globally for this region, this exception is set
31535        * </pre>
31536        */
31537       public Builder clearException() {
31538         if (exceptionBuilder_ == null) {
31539           exception_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
31540           onChanged();
31541         } else {
31542           exceptionBuilder_.clear();
31543         }
31544         bitField0_ = (bitField0_ & ~0x00000002);
31545         return this;
31546       }
31547       /**
31548        * <code>optional .NameBytesPair exception = 2;</code>
31549        *
31550        * <pre>
31551        * If the operation failed globally for this region, this exception is set
31552        * </pre>
31553        */
31554       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getExceptionBuilder() {
31555         bitField0_ |= 0x00000002;
31556         onChanged();
31557         return getExceptionFieldBuilder().getBuilder();
31558       }
31559       /**
31560        * <code>optional .NameBytesPair exception = 2;</code>
31561        *
31562        * <pre>
31563        * If the operation failed globally for this region, this exception is set
31564        * </pre>
31565        */
31566       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getExceptionOrBuilder() {
31567         if (exceptionBuilder_ != null) {
31568           return exceptionBuilder_.getMessageOrBuilder();
31569         } else {
31570           return exception_;
31571         }
31572       }
31573       /**
31574        * <code>optional .NameBytesPair exception = 2;</code>
31575        *
31576        * <pre>
31577        * If the operation failed globally for this region, this exception is set
31578        * </pre>
31579        */
31580       private com.google.protobuf.SingleFieldBuilder<
31581           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
31582           getExceptionFieldBuilder() {
31583         if (exceptionBuilder_ == null) {
31584           exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
31585               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
31586                   exception_,
31587                   getParentForChildren(),
31588                   isClean());
31589           exception_ = null;
31590         }
31591         return exceptionBuilder_;
31592       }
31593 
31594       // @@protoc_insertion_point(builder_scope:RegionActionResult)
31595     }
31596 
31597     static {
31598       defaultInstance = new RegionActionResult(true);
31599       defaultInstance.initFields();
31600     }
31601 
31602     // @@protoc_insertion_point(class_scope:RegionActionResult)
31603   }
31604 
31605   public interface MultiRequestOrBuilder
31606       extends com.google.protobuf.MessageOrBuilder {
31607 
31608     // repeated .RegionAction regionAction = 1;
31609     /**
31610      * <code>repeated .RegionAction regionAction = 1;</code>
31611      */
31612     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>
31613         getRegionActionList();
31614     /**
31615      * <code>repeated .RegionAction regionAction = 1;</code>
31616      */
31617     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index);
31618     /**
31619      * <code>repeated .RegionAction regionAction = 1;</code>
31620      */
31621     int getRegionActionCount();
31622     /**
31623      * <code>repeated .RegionAction regionAction = 1;</code>
31624      */
31625     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
31626         getRegionActionOrBuilderList();
31627     /**
31628      * <code>repeated .RegionAction regionAction = 1;</code>
31629      */
31630     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
31631         int index);
31632 
31633     // optional uint64 nonceGroup = 2;
31634     /**
31635      * <code>optional uint64 nonceGroup = 2;</code>
31636      */
31637     boolean hasNonceGroup();
31638     /**
31639      * <code>optional uint64 nonceGroup = 2;</code>
31640      */
31641     long getNonceGroup();
31642 
31643     // optional .Condition condition = 3;
31644     /**
31645      * <code>optional .Condition condition = 3;</code>
31646      */
31647     boolean hasCondition();
31648     /**
31649      * <code>optional .Condition condition = 3;</code>
31650      */
31651     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition();
31652     /**
31653      * <code>optional .Condition condition = 3;</code>
31654      */
31655     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder();
31656   }
31657   /**
31658    * Protobuf type {@code MultiRequest}
31659    *
31660    * <pre>
31661    **
31662    * Execute a list of actions on a given region in order.
31663    * Nothing prevents a request from containing several RegionActions for the same region.
31664    * For this reason, a MultiResponse is matched to its MultiRequest not by the region
31665    *  specifier but by position: the order of the RegionActionResults mirrors the order
31666    *  of the RegionActions.
31667    * </pre>
31668    */
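  /*
   * Editor's note (illustrative sketch, not emitted by protoc): as the comment
   * above says, responses are matched to requests by position, not by region
   * specifier. A client that builds the request below would therefore read the
   * i-th RegionActionResult of the MultiResponse as the outcome of the i-th
   * regionAction added here. The action values are hypothetical, and the
   * MultiResponse accessor is assumed to be the usual generated one.
   *
   *   ClientProtos.MultiRequest req = ClientProtos.MultiRequest.newBuilder()
   *       .addRegionAction(actionOnRegionA)
   *       .addRegionAction(actionOnRegionB)
   *       .build();
   *   // resp.getRegionActionResult(0) then corresponds to actionOnRegionA,
   *   // and resp.getRegionActionResult(1) to actionOnRegionB.
   */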
31669   public static final class MultiRequest extends
31670       com.google.protobuf.GeneratedMessage
31671       implements MultiRequestOrBuilder {
31672     // Use MultiRequest.newBuilder() to construct.
31673     private MultiRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
31674       super(builder);
31675       this.unknownFields = builder.getUnknownFields();
31676     }
31677     private MultiRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
31678 
31679     private static final MultiRequest defaultInstance;
31680     public static MultiRequest getDefaultInstance() {
31681       return defaultInstance;
31682     }
31683 
31684     public MultiRequest getDefaultInstanceForType() {
31685       return defaultInstance;
31686     }
31687 
31688     private final com.google.protobuf.UnknownFieldSet unknownFields;
31689     @java.lang.Override
31690     public final com.google.protobuf.UnknownFieldSet
31691         getUnknownFields() {
31692       return this.unknownFields;
31693     }
31694     private MultiRequest(
31695         com.google.protobuf.CodedInputStream input,
31696         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31697         throws com.google.protobuf.InvalidProtocolBufferException {
31698       initFields();
31699       int mutable_bitField0_ = 0;
31700       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
31701           com.google.protobuf.UnknownFieldSet.newBuilder();
31702       try {
31703         boolean done = false;
31704         while (!done) {
31705           int tag = input.readTag();
31706           switch (tag) {
31707             case 0:
31708               done = true;
31709               break;
31710             default: {
31711               if (!parseUnknownField(input, unknownFields,
31712                                      extensionRegistry, tag)) {
31713                 done = true;
31714               }
31715               break;
31716             }
31717             case 10: {
31718               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
31719                 regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>();
31720                 mutable_bitField0_ |= 0x00000001;
31721               }
31722               regionAction_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.PARSER, extensionRegistry));
31723               break;
31724             }
31725             case 16: {
31726               bitField0_ |= 0x00000001;
31727               nonceGroup_ = input.readUInt64();
31728               break;
31729             }
31730             case 26: {
31731               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null;
31732               if (((bitField0_ & 0x00000002) == 0x00000002)) {
31733                 subBuilder = condition_.toBuilder();
31734               }
31735               condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry);
31736               if (subBuilder != null) {
31737                 subBuilder.mergeFrom(condition_);
31738                 condition_ = subBuilder.buildPartial();
31739               }
31740               bitField0_ |= 0x00000002;
31741               break;
31742             }
31743           }
31744         }
31745       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
31746         throw e.setUnfinishedMessage(this);
31747       } catch (java.io.IOException e) {
31748         throw new com.google.protobuf.InvalidProtocolBufferException(
31749             e.getMessage()).setUnfinishedMessage(this);
31750       } finally {
31751         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
31752           regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
31753         }
31754         this.unknownFields = unknownFields.build();
31755         makeExtensionsImmutable();
31756       }
31757     }
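    /*
     * Editor's note: the case labels in the parsing loop above are protobuf wire
     * tags, computed as (field_number << 3) | wire_type. For this message:
     *   regionAction (field 1, length-delimited, wire type 2): (1 << 3) | 2 = 10
     *   nonceGroup   (field 2, varint,           wire type 0): (2 << 3) | 0 = 16
     *   condition    (field 3, length-delimited, wire type 2): (3 << 3) | 2 = 26
     * which is why the switch matches on 10, 16 and 26.
     */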
31758     public static final com.google.protobuf.Descriptors.Descriptor
31759         getDescriptor() {
31760       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor;
31761     }
31762 
31763     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
31764         internalGetFieldAccessorTable() {
31765       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable
31766           .ensureFieldAccessorsInitialized(
31767               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
31768     }
31769 
31770     public static com.google.protobuf.Parser<MultiRequest> PARSER =
31771         new com.google.protobuf.AbstractParser<MultiRequest>() {
31772       public MultiRequest parsePartialFrom(
31773           com.google.protobuf.CodedInputStream input,
31774           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31775           throws com.google.protobuf.InvalidProtocolBufferException {
31776         return new MultiRequest(input, extensionRegistry);
31777       }
31778     };
31779 
31780     @java.lang.Override
31781     public com.google.protobuf.Parser<MultiRequest> getParserForType() {
31782       return PARSER;
31783     }
31784 
31785     private int bitField0_;
31786     // repeated .RegionAction regionAction = 1;
31787     public static final int REGIONACTION_FIELD_NUMBER = 1;
31788     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_;
31789     /**
31790      * <code>repeated .RegionAction regionAction = 1;</code>
31791      */
31792     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
31793       return regionAction_;
31794     }
31795     /**
31796      * <code>repeated .RegionAction regionAction = 1;</code>
31797      */
31798     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
31799         getRegionActionOrBuilderList() {
31800       return regionAction_;
31801     }
31802     /**
31803      * <code>repeated .RegionAction regionAction = 1;</code>
31804      */
31805     public int getRegionActionCount() {
31806       return regionAction_.size();
31807     }
31808     /**
31809      * <code>repeated .RegionAction regionAction = 1;</code>
31810      */
31811     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
31812       return regionAction_.get(index);
31813     }
31814     /**
31815      * <code>repeated .RegionAction regionAction = 1;</code>
31816      */
31817     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
31818         int index) {
31819       return regionAction_.get(index);
31820     }
31821 
31822     // optional uint64 nonceGroup = 2;
31823     public static final int NONCEGROUP_FIELD_NUMBER = 2;
31824     private long nonceGroup_;
31825     /**
31826      * <code>optional uint64 nonceGroup = 2;</code>
31827      */
31828     public boolean hasNonceGroup() {
31829       return ((bitField0_ & 0x00000001) == 0x00000001);
31830     }
31831     /**
31832      * <code>optional uint64 nonceGroup = 2;</code>
31833      */
31834     public long getNonceGroup() {
31835       return nonceGroup_;
31836     }
31837 
31838     // optional .Condition condition = 3;
31839     public static final int CONDITION_FIELD_NUMBER = 3;
31840     private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_;
31841     /**
31842      * <code>optional .Condition condition = 3;</code>
31843      */
31844     public boolean hasCondition() {
31845       return ((bitField0_ & 0x00000002) == 0x00000002);
31846     }
31847     /**
31848      * <code>optional .Condition condition = 3;</code>
31849      */
31850     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
31851       return condition_;
31852     }
31853     /**
31854      * <code>optional .Condition condition = 3;</code>
31855      */
31856     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
31857       return condition_;
31858     }
31859 
31860     private void initFields() {
31861       regionAction_ = java.util.Collections.emptyList();
31862       nonceGroup_ = 0L;
31863       condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
31864     }
31865     private byte memoizedIsInitialized = -1;
31866     public final boolean isInitialized() {
31867       byte isInitialized = memoizedIsInitialized;
31868       if (isInitialized != -1) return isInitialized == 1;
31869 
31870       for (int i = 0; i < getRegionActionCount(); i++) {
31871         if (!getRegionAction(i).isInitialized()) {
31872           memoizedIsInitialized = 0;
31873           return false;
31874         }
31875       }
31876       if (hasCondition()) {
31877         if (!getCondition().isInitialized()) {
31878           memoizedIsInitialized = 0;
31879           return false;
31880         }
31881       }
31882       memoizedIsInitialized = 1;
31883       return true;
31884     }
31885 
31886     public void writeTo(com.google.protobuf.CodedOutputStream output)
31887                         throws java.io.IOException {
31888       getSerializedSize();
31889       for (int i = 0; i < regionAction_.size(); i++) {
31890         output.writeMessage(1, regionAction_.get(i));
31891       }
31892       if (((bitField0_ & 0x00000001) == 0x00000001)) {
31893         output.writeUInt64(2, nonceGroup_);
31894       }
31895       if (((bitField0_ & 0x00000002) == 0x00000002)) {
31896         output.writeMessage(3, condition_);
31897       }
31898       getUnknownFields().writeTo(output);
31899     }
31900 
31901     private int memoizedSerializedSize = -1;
31902     public int getSerializedSize() {
31903       int size = memoizedSerializedSize;
31904       if (size != -1) return size;
31905 
31906       size = 0;
31907       for (int i = 0; i < regionAction_.size(); i++) {
31908         size += com.google.protobuf.CodedOutputStream
31909           .computeMessageSize(1, regionAction_.get(i));
31910       }
31911       if (((bitField0_ & 0x00000001) == 0x00000001)) {
31912         size += com.google.protobuf.CodedOutputStream
31913           .computeUInt64Size(2, nonceGroup_);
31914       }
31915       if (((bitField0_ & 0x00000002) == 0x00000002)) {
31916         size += com.google.protobuf.CodedOutputStream
31917           .computeMessageSize(3, condition_);
31918       }
31919       size += getUnknownFields().getSerializedSize();
31920       memoizedSerializedSize = size;
31921       return size;
31922     }
31923 
31924     private static final long serialVersionUID = 0L;
31925     @java.lang.Override
31926     protected java.lang.Object writeReplace()
31927         throws java.io.ObjectStreamException {
31928       return super.writeReplace();
31929     }
31930 
31931     @java.lang.Override
31932     public boolean equals(final java.lang.Object obj) {
31933       if (obj == this) {
31934        return true;
31935       }
31936       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)) {
31937         return super.equals(obj);
31938       }
31939       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) obj;
31940 
31941       boolean result = true;
31942       result = result && getRegionActionList()
31943           .equals(other.getRegionActionList());
31944       result = result && (hasNonceGroup() == other.hasNonceGroup());
31945       if (hasNonceGroup()) {
31946         result = result && (getNonceGroup()
31947             == other.getNonceGroup());
31948       }
31949       result = result && (hasCondition() == other.hasCondition());
31950       if (hasCondition()) {
31951         result = result && getCondition()
31952             .equals(other.getCondition());
31953       }
31954       result = result &&
31955           getUnknownFields().equals(other.getUnknownFields());
31956       return result;
31957     }
31958 
31959     private int memoizedHashCode = 0;
31960     @java.lang.Override
31961     public int hashCode() {
31962       if (memoizedHashCode != 0) {
31963         return memoizedHashCode;
31964       }
31965       int hash = 41;
31966       hash = (19 * hash) + getDescriptorForType().hashCode();
31967       if (getRegionActionCount() > 0) {
31968         hash = (37 * hash) + REGIONACTION_FIELD_NUMBER;
31969         hash = (53 * hash) + getRegionActionList().hashCode();
31970       }
31971       if (hasNonceGroup()) {
31972         hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
31973         hash = (53 * hash) + hashLong(getNonceGroup());
31974       }
31975       if (hasCondition()) {
31976         hash = (37 * hash) + CONDITION_FIELD_NUMBER;
31977         hash = (53 * hash) + getCondition().hashCode();
31978       }
31979       hash = (29 * hash) + getUnknownFields().hashCode();
31980       memoizedHashCode = hash;
31981       return hash;
31982     }
31983 
31984     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
31985         com.google.protobuf.ByteString data)
31986         throws com.google.protobuf.InvalidProtocolBufferException {
31987       return PARSER.parseFrom(data);
31988     }
31989     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
31990         com.google.protobuf.ByteString data,
31991         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
31992         throws com.google.protobuf.InvalidProtocolBufferException {
31993       return PARSER.parseFrom(data, extensionRegistry);
31994     }
31995     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(byte[] data)
31996         throws com.google.protobuf.InvalidProtocolBufferException {
31997       return PARSER.parseFrom(data);
31998     }
31999     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
32000         byte[] data,
32001         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32002         throws com.google.protobuf.InvalidProtocolBufferException {
32003       return PARSER.parseFrom(data, extensionRegistry);
32004     }
32005     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(java.io.InputStream input)
32006         throws java.io.IOException {
32007       return PARSER.parseFrom(input);
32008     }
32009     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
32010         java.io.InputStream input,
32011         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32012         throws java.io.IOException {
32013       return PARSER.parseFrom(input, extensionRegistry);
32014     }
32015     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input)
32016         throws java.io.IOException {
32017       return PARSER.parseDelimitedFrom(input);
32018     }
32019     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseDelimitedFrom(
32020         java.io.InputStream input,
32021         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32022         throws java.io.IOException {
32023       return PARSER.parseDelimitedFrom(input, extensionRegistry);
32024     }
32025     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
32026         com.google.protobuf.CodedInputStream input)
32027         throws java.io.IOException {
32028       return PARSER.parseFrom(input);
32029     }
32030     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parseFrom(
32031         com.google.protobuf.CodedInputStream input,
32032         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32033         throws java.io.IOException {
32034       return PARSER.parseFrom(input, extensionRegistry);
32035     }
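    // Editorial note (not generated): all parseFrom/parseDelimitedFrom overloads above delegate
    // to PARSER. A minimal round-trip sketch, assuming an already-built MultiRequest named
    // `request` (hypothetical variable):
    //
    //   byte[] bytes = request.toByteArray();
    //   MultiRequest copy = MultiRequest.parseFrom(bytes);
    //   assert copy.equals(request);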
32036 
32037     public static Builder newBuilder() { return Builder.create(); }
32038     public Builder newBuilderForType() { return newBuilder(); }
32039     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest prototype) {
32040       return newBuilder().mergeFrom(prototype);
32041     }
32042     public Builder toBuilder() { return newBuilder(this); }
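    // Editorial note (not generated): toBuilder() copies an existing message back into a mutable
    // Builder. A sketch with a hypothetical `request` variable:
    //
    //   MultiRequest withNonce = request.toBuilder().setNonceGroup(42L).build();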
32043 
32044     @java.lang.Override
32045     protected Builder newBuilderForType(
32046         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
32047       Builder builder = new Builder(parent);
32048       return builder;
32049     }
32050     /**
32051      * Protobuf type {@code MultiRequest}
32052      *
32053      * <pre>
32054      **
32055      * Execute a list of actions on a given region in order.
32056      * Nothing prevents a request from containing several RegionActions for the same region.
32057      * For this reason, a MultiResponse is not matched to its MultiRequest by region
32058      *  specifier; instead, the i-th RegionActionResult in the response corresponds to the
32059      *  i-th RegionAction in the request.
32060      * </pre>
32061      */
32062     public static final class Builder extends
32063         com.google.protobuf.GeneratedMessage.Builder<Builder>
32064        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequestOrBuilder {
32065       public static final com.google.protobuf.Descriptors.Descriptor
32066           getDescriptor() {
32067         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor;
32068       }
32069 
32070       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
32071           internalGetFieldAccessorTable() {
32072         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_fieldAccessorTable
32073             .ensureFieldAccessorsInitialized(
32074                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
32075       }
32076 
32077       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.newBuilder()
32078       private Builder() {
32079         maybeForceBuilderInitialization();
32080       }
32081 
32082       private Builder(
32083           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
32084         super(parent);
32085         maybeForceBuilderInitialization();
32086       }
32087       private void maybeForceBuilderInitialization() {
32088         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
32089           getRegionActionFieldBuilder();
32090           getConditionFieldBuilder();
32091         }
32092       }
32093       private static Builder create() {
32094         return new Builder();
32095       }
32096 
32097       public Builder clear() {
32098         super.clear();
32099         if (regionActionBuilder_ == null) {
32100           regionAction_ = java.util.Collections.emptyList();
32101           bitField0_ = (bitField0_ & ~0x00000001);
32102         } else {
32103           regionActionBuilder_.clear();
32104         }
32105         nonceGroup_ = 0L;
32106         bitField0_ = (bitField0_ & ~0x00000002);
32107         if (conditionBuilder_ == null) {
32108           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
32109         } else {
32110           conditionBuilder_.clear();
32111         }
32112         bitField0_ = (bitField0_ & ~0x00000004);
32113         return this;
32114       }
32115 
32116       public Builder clone() {
32117         return create().mergeFrom(buildPartial());
32118       }
32119 
32120       public com.google.protobuf.Descriptors.Descriptor
32121           getDescriptorForType() {
32122         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiRequest_descriptor;
32123       }
32124 
32125       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest getDefaultInstanceForType() {
32126         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
32127       }
32128 
32129       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest build() {
32130         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = buildPartial();
32131         if (!result.isInitialized()) {
32132           throw newUninitializedMessageException(result);
32133         }
32134         return result;
32135       }
32136 
32137       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest buildPartial() {
32138         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest(this);
32139         int from_bitField0_ = bitField0_;
32140         int to_bitField0_ = 0;
32141         if (regionActionBuilder_ == null) {
32142           if (((bitField0_ & 0x00000001) == 0x00000001)) {
32143             regionAction_ = java.util.Collections.unmodifiableList(regionAction_);
32144             bitField0_ = (bitField0_ & ~0x00000001);
32145           }
32146           result.regionAction_ = regionAction_;
32147         } else {
32148           result.regionAction_ = regionActionBuilder_.build();
32149         }
32150         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
32151           to_bitField0_ |= 0x00000001;
32152         }
32153         result.nonceGroup_ = nonceGroup_;
32154         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
32155           to_bitField0_ |= 0x00000002;
32156         }
32157         if (conditionBuilder_ == null) {
32158           result.condition_ = condition_;
32159         } else {
32160           result.condition_ = conditionBuilder_.build();
32161         }
32162         result.bitField0_ = to_bitField0_;
32163         onBuilt();
32164         return result;
32165       }
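      // Editorial note (not generated): buildPartial() remaps the builder's has-bits onto the
      // message's has-bits — builder bit 0x2 (nonceGroup) becomes message bit 0x1 and builder
      // bit 0x4 (condition) becomes message bit 0x2; builder bit 0x1 only tracks whether the
      // repeated regionAction list is still the mutable working copy.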
32166 
32167       public Builder mergeFrom(com.google.protobuf.Message other) {
32168         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) {
32169           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)other);
32170         } else {
32171           super.mergeFrom(other);
32172           return this;
32173         }
32174       }
32175 
32176       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest other) {
32177         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance()) return this;
32178         if (regionActionBuilder_ == null) {
32179           if (!other.regionAction_.isEmpty()) {
32180             if (regionAction_.isEmpty()) {
32181               regionAction_ = other.regionAction_;
32182               bitField0_ = (bitField0_ & ~0x00000001);
32183             } else {
32184               ensureRegionActionIsMutable();
32185               regionAction_.addAll(other.regionAction_);
32186             }
32187             onChanged();
32188           }
32189         } else {
32190           if (!other.regionAction_.isEmpty()) {
32191             if (regionActionBuilder_.isEmpty()) {
32192               regionActionBuilder_.dispose();
32193               regionActionBuilder_ = null;
32194               regionAction_ = other.regionAction_;
32195               bitField0_ = (bitField0_ & ~0x00000001);
32196               regionActionBuilder_ =
32197                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
32198                    getRegionActionFieldBuilder() : null;
32199             } else {
32200               regionActionBuilder_.addAllMessages(other.regionAction_);
32201             }
32202           }
32203         }
32204         if (other.hasNonceGroup()) {
32205           setNonceGroup(other.getNonceGroup());
32206         }
32207         if (other.hasCondition()) {
32208           mergeCondition(other.getCondition());
32209         }
32210         this.mergeUnknownFields(other.getUnknownFields());
32211         return this;
32212       }
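      // Editorial note (not generated): merging follows the usual protobuf rules — repeated
      // regionAction entries from `other` are appended, the optional nonceGroup scalar is
      // overwritten when set in `other`, and a set condition message is merged field by field
      // via mergeCondition().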
32213 
32214       public final boolean isInitialized() {
32215         for (int i = 0; i < getRegionActionCount(); i++) {
32216           if (!getRegionAction(i).isInitialized()) {
32217 
32218             return false;
32219           }
32220         }
32221         if (hasCondition()) {
32222           if (!getCondition().isInitialized()) {
32223 
32224             return false;
32225           }
32226         }
32227         return true;
32228       }
32229 
32230       public Builder mergeFrom(
32231           com.google.protobuf.CodedInputStream input,
32232           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32233           throws java.io.IOException {
32234         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest parsedMessage = null;
32235         try {
32236           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
32237         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
32238           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest) e.getUnfinishedMessage();
32239           throw e;
32240         } finally {
32241           if (parsedMessage != null) {
32242             mergeFrom(parsedMessage);
32243           }
32244         }
32245         return this;
32246       }
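      // Editorial note (not generated): if parsing fails midway, the partially decoded message is
      // still merged into this builder in the finally block before the
      // InvalidProtocolBufferException propagates, so a caller can inspect whatever fields were
      // read before the error.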
32247       private int bitField0_;
32248 
32249       // repeated .RegionAction regionAction = 1;
32250       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> regionAction_ =
32251         java.util.Collections.emptyList();
32252       private void ensureRegionActionIsMutable() {
32253         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
32254           regionAction_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction>(regionAction_);
32255           bitField0_ |= 0x00000001;
32256          }
32257       }
32258 
32259       private com.google.protobuf.RepeatedFieldBuilder<
32260           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder> regionActionBuilder_;
32261 
32262       /**
32263        * <code>repeated .RegionAction regionAction = 1;</code>
32264        */
32265       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> getRegionActionList() {
32266         if (regionActionBuilder_ == null) {
32267           return java.util.Collections.unmodifiableList(regionAction_);
32268         } else {
32269           return regionActionBuilder_.getMessageList();
32270         }
32271       }
32272       /**
32273        * <code>repeated .RegionAction regionAction = 1;</code>
32274        */
32275       public int getRegionActionCount() {
32276         if (regionActionBuilder_ == null) {
32277           return regionAction_.size();
32278         } else {
32279           return regionActionBuilder_.getCount();
32280         }
32281       }
32282       /**
32283        * <code>repeated .RegionAction regionAction = 1;</code>
32284        */
32285       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction getRegionAction(int index) {
32286         if (regionActionBuilder_ == null) {
32287           return regionAction_.get(index);
32288         } else {
32289           return regionActionBuilder_.getMessage(index);
32290         }
32291       }
32292       /**
32293        * <code>repeated .RegionAction regionAction = 1;</code>
32294        */
32295       public Builder setRegionAction(
32296           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
32297         if (regionActionBuilder_ == null) {
32298           if (value == null) {
32299             throw new NullPointerException();
32300           }
32301           ensureRegionActionIsMutable();
32302           regionAction_.set(index, value);
32303           onChanged();
32304         } else {
32305           regionActionBuilder_.setMessage(index, value);
32306         }
32307         return this;
32308       }
32309       /**
32310        * <code>repeated .RegionAction regionAction = 1;</code>
32311        */
32312       public Builder setRegionAction(
32313           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
32314         if (regionActionBuilder_ == null) {
32315           ensureRegionActionIsMutable();
32316           regionAction_.set(index, builderForValue.build());
32317           onChanged();
32318         } else {
32319           regionActionBuilder_.setMessage(index, builderForValue.build());
32320         }
32321         return this;
32322       }
32323       /**
32324        * <code>repeated .RegionAction regionAction = 1;</code>
32325        */
32326       public Builder addRegionAction(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
32327         if (regionActionBuilder_ == null) {
32328           if (value == null) {
32329             throw new NullPointerException();
32330           }
32331           ensureRegionActionIsMutable();
32332           regionAction_.add(value);
32333           onChanged();
32334         } else {
32335           regionActionBuilder_.addMessage(value);
32336         }
32337         return this;
32338       }
32339       /**
32340        * <code>repeated .RegionAction regionAction = 1;</code>
32341        */
32342       public Builder addRegionAction(
32343           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction value) {
32344         if (regionActionBuilder_ == null) {
32345           if (value == null) {
32346             throw new NullPointerException();
32347           }
32348           ensureRegionActionIsMutable();
32349           regionAction_.add(index, value);
32350           onChanged();
32351         } else {
32352           regionActionBuilder_.addMessage(index, value);
32353         }
32354         return this;
32355       }
32356       /**
32357        * <code>repeated .RegionAction regionAction = 1;</code>
32358        */
32359       public Builder addRegionAction(
32360           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
32361         if (regionActionBuilder_ == null) {
32362           ensureRegionActionIsMutable();
32363           regionAction_.add(builderForValue.build());
32364           onChanged();
32365         } else {
32366           regionActionBuilder_.addMessage(builderForValue.build());
32367         }
32368         return this;
32369       }
32370       /**
32371        * <code>repeated .RegionAction regionAction = 1;</code>
32372        */
32373       public Builder addRegionAction(
32374           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder builderForValue) {
32375         if (regionActionBuilder_ == null) {
32376           ensureRegionActionIsMutable();
32377           regionAction_.add(index, builderForValue.build());
32378           onChanged();
32379         } else {
32380           regionActionBuilder_.addMessage(index, builderForValue.build());
32381         }
32382         return this;
32383       }
32384       /**
32385        * <code>repeated .RegionAction regionAction = 1;</code>
32386        */
32387       public Builder addAllRegionAction(
32388           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction> values) {
32389         if (regionActionBuilder_ == null) {
32390           ensureRegionActionIsMutable();
32391           super.addAll(values, regionAction_);
32392           onChanged();
32393         } else {
32394           regionActionBuilder_.addAllMessages(values);
32395         }
32396         return this;
32397       }
32398       /**
32399        * <code>repeated .RegionAction regionAction = 1;</code>
32400        */
32401       public Builder clearRegionAction() {
32402         if (regionActionBuilder_ == null) {
32403           regionAction_ = java.util.Collections.emptyList();
32404           bitField0_ = (bitField0_ & ~0x00000001);
32405           onChanged();
32406         } else {
32407           regionActionBuilder_.clear();
32408         }
32409         return this;
32410       }
32411       /**
32412        * <code>repeated .RegionAction regionAction = 1;</code>
32413        */
32414       public Builder removeRegionAction(int index) {
32415         if (regionActionBuilder_ == null) {
32416           ensureRegionActionIsMutable();
32417           regionAction_.remove(index);
32418           onChanged();
32419         } else {
32420           regionActionBuilder_.remove(index);
32421         }
32422         return this;
32423       }
32424       /**
32425        * <code>repeated .RegionAction regionAction = 1;</code>
32426        */
32427       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder getRegionActionBuilder(
32428           int index) {
32429         return getRegionActionFieldBuilder().getBuilder(index);
32430       }
32431       /**
32432        * <code>repeated .RegionAction regionAction = 1;</code>
32433        */
32434       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder getRegionActionOrBuilder(
32435           int index) {
32436         if (regionActionBuilder_ == null) {
32437           return regionAction_.get(index);  } else {
32438           return regionActionBuilder_.getMessageOrBuilder(index);
32439         }
32440       }
32441       /**
32442        * <code>repeated .RegionAction regionAction = 1;</code>
32443        */
32444       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
32445            getRegionActionOrBuilderList() {
32446         if (regionActionBuilder_ != null) {
32447           return regionActionBuilder_.getMessageOrBuilderList();
32448         } else {
32449           return java.util.Collections.unmodifiableList(regionAction_);
32450         }
32451       }
32452       /**
32453        * <code>repeated .RegionAction regionAction = 1;</code>
32454        */
32455       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder() {
32456         return getRegionActionFieldBuilder().addBuilder(
32457             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
32458       }
32459       /**
32460        * <code>repeated .RegionAction regionAction = 1;</code>
32461        */
32462       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder addRegionActionBuilder(
32463           int index) {
32464         return getRegionActionFieldBuilder().addBuilder(
32465             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.getDefaultInstance());
32466       }
32467       /**
32468        * <code>repeated .RegionAction regionAction = 1;</code>
32469        */
32470       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder>
32471            getRegionActionBuilderList() {
32472         return getRegionActionFieldBuilder().getBuilderList();
32473       }
32474       private com.google.protobuf.RepeatedFieldBuilder<
32475           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>
32476           getRegionActionFieldBuilder() {
32477         if (regionActionBuilder_ == null) {
32478           regionActionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
32479               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionOrBuilder>(
32480                   regionAction_,
32481                   ((bitField0_ & 0x00000001) == 0x00000001),
32482                   getParentForChildren(),
32483                   isClean());
32484           regionAction_ = null;
32485         }
32486         return regionActionBuilder_;
32487       }
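      // Editorial note (not generated): the RepeatedFieldBuilder is created lazily; once it
      // exists, the regionAction_ list is handed over to it (and nulled here) and all further
      // list access goes through the builder, which also invalidates the parent via onChanged().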
32488 
32489       // optional uint64 nonceGroup = 2;
32490       private long nonceGroup_ ;
32491       /**
32492        * <code>optional uint64 nonceGroup = 2;</code>
32493        */
32494       public boolean hasNonceGroup() {
32495         return ((bitField0_ & 0x00000002) == 0x00000002);
32496       }
32497       /**
32498        * <code>optional uint64 nonceGroup = 2;</code>
32499        */
32500       public long getNonceGroup() {
32501         return nonceGroup_;
32502       }
32503       /**
32504        * <code>optional uint64 nonceGroup = 2;</code>
32505        */
32506       public Builder setNonceGroup(long value) {
32507         bitField0_ |= 0x00000002;
32508         nonceGroup_ = value;
32509         onChanged();
32510         return this;
32511       }
32512       /**
32513        * <code>optional uint64 nonceGroup = 2;</code>
32514        */
32515       public Builder clearNonceGroup() {
32516         bitField0_ = (bitField0_ & ~0x00000002);
32517         nonceGroup_ = 0L;
32518         onChanged();
32519         return this;
32520       }
32521 
32522       // optional .Condition condition = 3;
32523       private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
32524       private com.google.protobuf.SingleFieldBuilder<
32525           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_;
32526       /**
32527        * <code>optional .Condition condition = 3;</code>
32528        */
32529       public boolean hasCondition() {
32530         return ((bitField0_ & 0x00000004) == 0x00000004);
32531       }
32532       /**
32533        * <code>optional .Condition condition = 3;</code>
32534        */
32535       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() {
32536         if (conditionBuilder_ == null) {
32537           return condition_;
32538         } else {
32539           return conditionBuilder_.getMessage();
32540         }
32541       }
32542       /**
32543        * <code>optional .Condition condition = 3;</code>
32544        */
32545       public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
32546         if (conditionBuilder_ == null) {
32547           if (value == null) {
32548             throw new NullPointerException();
32549           }
32550           condition_ = value;
32551           onChanged();
32552         } else {
32553           conditionBuilder_.setMessage(value);
32554         }
32555         bitField0_ |= 0x00000004;
32556         return this;
32557       }
32558       /**
32559        * <code>optional .Condition condition = 3;</code>
32560        */
32561       public Builder setCondition(
32562           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) {
32563         if (conditionBuilder_ == null) {
32564           condition_ = builderForValue.build();
32565           onChanged();
32566         } else {
32567           conditionBuilder_.setMessage(builderForValue.build());
32568         }
32569         bitField0_ |= 0x00000004;
32570         return this;
32571       }
32572       /**
32573        * <code>optional .Condition condition = 3;</code>
32574        */
32575       public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) {
32576         if (conditionBuilder_ == null) {
32577           if (((bitField0_ & 0x00000004) == 0x00000004) &&
32578               condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) {
32579             condition_ =
32580               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial();
32581           } else {
32582             condition_ = value;
32583           }
32584           onChanged();
32585         } else {
32586           conditionBuilder_.mergeFrom(value);
32587         }
32588         bitField0_ |= 0x00000004;
32589         return this;
32590       }
32591       /**
32592        * <code>optional .Condition condition = 3;</code>
32593        */
32594       public Builder clearCondition() {
32595         if (conditionBuilder_ == null) {
32596           condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance();
32597           onChanged();
32598         } else {
32599           conditionBuilder_.clear();
32600         }
32601         bitField0_ = (bitField0_ & ~0x00000004);
32602         return this;
32603       }
32604       /**
32605        * <code>optional .Condition condition = 3;</code>
32606        */
32607       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() {
32608         bitField0_ |= 0x00000004;
32609         onChanged();
32610         return getConditionFieldBuilder().getBuilder();
32611       }
32612       /**
32613        * <code>optional .Condition condition = 3;</code>
32614        */
32615       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() {
32616         if (conditionBuilder_ != null) {
32617           return conditionBuilder_.getMessageOrBuilder();
32618         } else {
32619           return condition_;
32620         }
32621       }
32622       /**
32623        * <code>optional .Condition condition = 3;</code>
32624        */
32625       private com.google.protobuf.SingleFieldBuilder<
32626           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>
32627           getConditionFieldBuilder() {
32628         if (conditionBuilder_ == null) {
32629           conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
32630               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>(
32631                   condition_,
32632                   getParentForChildren(),
32633                   isClean());
32634           condition_ = null;
32635         }
32636         return conditionBuilder_;
32637       }
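      // Editorial note (not generated): the same lazy pattern is used for the singular condition
      // field — a SingleFieldBuilder takes ownership of condition_ on first use so that
      // getConditionBuilder() can expose a mutable nested builder tied to this parent builder.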
32638 
32639       // @@protoc_insertion_point(builder_scope:MultiRequest)
32640     }
32641 
32642     static {
32643       defaultInstance = new MultiRequest(true);
32644       defaultInstance.initFields();
32645     }
32646 
32647     // @@protoc_insertion_point(class_scope:MultiRequest)
32648   }
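  // Editorial note (not part of the generated code): a minimal sketch of assembling a
  // MultiRequest, assuming `regionAction` is a fully populated RegionAction and `nonceGroup`
  // is the client's nonce group (both hypothetical); the optional Condition is only attached
  // for checkAndMutate-style calls:
  //
  //   ClientProtos.MultiRequest request = ClientProtos.MultiRequest.newBuilder()
  //       .addRegionAction(regionAction)
  //       .setNonceGroup(nonceGroup)
  //       .build();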
32649 
32650   public interface MultiResponseOrBuilder
32651       extends com.google.protobuf.MessageOrBuilder {
32652 
32653     // repeated .RegionActionResult regionActionResult = 1;
32654     /**
32655      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32656      */
32657     java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>
32658         getRegionActionResultList();
32659     /**
32660      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32661      */
32662     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index);
32663     /**
32664      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32665      */
32666     int getRegionActionResultCount();
32667     /**
32668      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32669      */
32670     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
32671         getRegionActionResultOrBuilderList();
32672     /**
32673      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32674      */
32675     org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
32676         int index);
32677 
32678     // optional bool processed = 2;
32679     /**
32680      * <code>optional bool processed = 2;</code>
32681      *
32682      * <pre>
32683      * used by mutate requests to indicate whether the mutation was processed
32684      * </pre>
32685      */
32686     boolean hasProcessed();
32687     /**
32688      * <code>optional bool processed = 2;</code>
32689      *
32690      * <pre>
32691      * used by mutate requests to indicate whether the mutation was processed
32692      * </pre>
32693      */
32694     boolean getProcessed();
32695   }
32696   /**
32697    * Protobuf type {@code MultiResponse}
32698    */
32699   public static final class MultiResponse extends
32700       com.google.protobuf.GeneratedMessage
32701       implements MultiResponseOrBuilder {
32702     // Use MultiResponse.newBuilder() to construct.
32703     private MultiResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
32704       super(builder);
32705       this.unknownFields = builder.getUnknownFields();
32706     }
32707     private MultiResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
32708 
32709     private static final MultiResponse defaultInstance;
32710     public static MultiResponse getDefaultInstance() {
32711       return defaultInstance;
32712     }
32713 
32714     public MultiResponse getDefaultInstanceForType() {
32715       return defaultInstance;
32716     }
32717 
32718     private final com.google.protobuf.UnknownFieldSet unknownFields;
32719     @java.lang.Override
32720     public final com.google.protobuf.UnknownFieldSet
32721         getUnknownFields() {
32722       return this.unknownFields;
32723     }
32724     private MultiResponse(
32725         com.google.protobuf.CodedInputStream input,
32726         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32727         throws com.google.protobuf.InvalidProtocolBufferException {
32728       initFields();
32729       int mutable_bitField0_ = 0;
32730       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
32731           com.google.protobuf.UnknownFieldSet.newBuilder();
32732       try {
32733         boolean done = false;
32734         while (!done) {
32735           int tag = input.readTag();
32736           switch (tag) {
32737             case 0:
32738               done = true;
32739               break;
32740             default: {
32741               if (!parseUnknownField(input, unknownFields,
32742                                      extensionRegistry, tag)) {
32743                 done = true;
32744               }
32745               break;
32746             }
32747             case 10: {
32748               if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
32749                 regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>();
32750                 mutable_bitField0_ |= 0x00000001;
32751               }
32752               regionActionResult_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.PARSER, extensionRegistry));
32753               break;
32754             }
32755             case 16: {
32756               bitField0_ |= 0x00000001;
32757               processed_ = input.readBool();
32758               break;
32759             }
32760           }
32761         }
32762       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
32763         throw e.setUnfinishedMessage(this);
32764       } catch (java.io.IOException e) {
32765         throw new com.google.protobuf.InvalidProtocolBufferException(
32766             e.getMessage()).setUnfinishedMessage(this);
32767       } finally {
32768         if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
32769           regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
32770         }
32771         this.unknownFields = unknownFields.build();
32772         makeExtensionsImmutable();
32773       }
32774     }
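    // Editorial note (not generated): the switch in the constructor above dispatches on the raw
    // wire tag, where tag = (field number << 3) | wire type. Case 10 is field 1, wire type 2
    // (length-delimited RegionActionResult messages); case 16 is field 2, wire type 0 (the
    // varint-encoded bool `processed`); unrecognized tags are preserved in unknownFields.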
32775     public static final com.google.protobuf.Descriptors.Descriptor
32776         getDescriptor() {
32777       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor;
32778     }
32779 
32780     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
32781         internalGetFieldAccessorTable() {
32782       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable
32783           .ensureFieldAccessorsInitialized(
32784               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
32785     }
32786 
32787     public static com.google.protobuf.Parser<MultiResponse> PARSER =
32788         new com.google.protobuf.AbstractParser<MultiResponse>() {
32789       public MultiResponse parsePartialFrom(
32790           com.google.protobuf.CodedInputStream input,
32791           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32792           throws com.google.protobuf.InvalidProtocolBufferException {
32793         return new MultiResponse(input, extensionRegistry);
32794       }
32795     };
32796 
32797     @java.lang.Override
32798     public com.google.protobuf.Parser<MultiResponse> getParserForType() {
32799       return PARSER;
32800     }
32801 
32802     private int bitField0_;
32803     // repeated .RegionActionResult regionActionResult = 1;
32804     public static final int REGIONACTIONRESULT_FIELD_NUMBER = 1;
32805     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_;
32806     /**
32807      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32808      */
32809     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
32810       return regionActionResult_;
32811     }
32812     /**
32813      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32814      */
32815     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
32816         getRegionActionResultOrBuilderList() {
32817       return regionActionResult_;
32818     }
32819     /**
32820      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32821      */
32822     public int getRegionActionResultCount() {
32823       return regionActionResult_.size();
32824     }
32825     /**
32826      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32827      */
32828     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
32829       return regionActionResult_.get(index);
32830     }
32831     /**
32832      * <code>repeated .RegionActionResult regionActionResult = 1;</code>
32833      */
32834     public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
32835         int index) {
32836       return regionActionResult_.get(index);
32837     }
32838 
32839     // optional bool processed = 2;
32840     public static final int PROCESSED_FIELD_NUMBER = 2;
32841     private boolean processed_;
32842     /**
32843      * <code>optional bool processed = 2;</code>
32844      *
32845      * <pre>
32846      * used by mutate requests to indicate whether the mutation was processed
32847      * </pre>
32848      */
32849     public boolean hasProcessed() {
32850       return ((bitField0_ & 0x00000001) == 0x00000001);
32851     }
32852     /**
32853      * <code>optional bool processed = 2;</code>
32854      *
32855      * <pre>
32856      * used by mutate requests to indicate whether the mutation was processed
32857      * </pre>
32858      */
32859     public boolean getProcessed() {
32860       return processed_;
32861     }
32862 
32863     private void initFields() {
32864       regionActionResult_ = java.util.Collections.emptyList();
32865       processed_ = false;
32866     }
32867     private byte memoizedIsInitialized = -1;
32868     public final boolean isInitialized() {
32869       byte isInitialized = memoizedIsInitialized;
32870       if (isInitialized != -1) return isInitialized == 1;
32871 
32872       for (int i = 0; i < getRegionActionResultCount(); i++) {
32873         if (!getRegionActionResult(i).isInitialized()) {
32874           memoizedIsInitialized = 0;
32875           return false;
32876         }
32877       }
32878       memoizedIsInitialized = 1;
32879       return true;
32880     }
32881 
32882     public void writeTo(com.google.protobuf.CodedOutputStream output)
32883                         throws java.io.IOException {
32884       getSerializedSize();
32885       for (int i = 0; i < regionActionResult_.size(); i++) {
32886         output.writeMessage(1, regionActionResult_.get(i));
32887       }
32888       if (((bitField0_ & 0x00000001) == 0x00000001)) {
32889         output.writeBool(2, processed_);
32890       }
32891       getUnknownFields().writeTo(output);
32892     }
32893 
32894     private int memoizedSerializedSize = -1;
32895     public int getSerializedSize() {
32896       int size = memoizedSerializedSize;
32897       if (size != -1) return size;
32898 
32899       size = 0;
32900       for (int i = 0; i < regionActionResult_.size(); i++) {
32901         size += com.google.protobuf.CodedOutputStream
32902           .computeMessageSize(1, regionActionResult_.get(i));
32903       }
32904       if (((bitField0_ & 0x00000001) == 0x00000001)) {
32905         size += com.google.protobuf.CodedOutputStream
32906           .computeBoolSize(2, processed_);
32907       }
32908       size += getUnknownFields().getSerializedSize();
32909       memoizedSerializedSize = size;
32910       return size;
32911     }
32912 
32913     private static final long serialVersionUID = 0L;
32914     @java.lang.Override
32915     protected java.lang.Object writeReplace()
32916         throws java.io.ObjectStreamException {
32917       return super.writeReplace();
32918     }
32919 
32920     @java.lang.Override
32921     public boolean equals(final java.lang.Object obj) {
32922       if (obj == this) {
32923        return true;
32924       }
32925       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)) {
32926         return super.equals(obj);
32927       }
32928       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) obj;
32929 
32930       boolean result = true;
32931       result = result && getRegionActionResultList()
32932           .equals(other.getRegionActionResultList());
32933       result = result && (hasProcessed() == other.hasProcessed());
32934       if (hasProcessed()) {
32935         result = result && (getProcessed()
32936             == other.getProcessed());
32937       }
32938       result = result &&
32939           getUnknownFields().equals(other.getUnknownFields());
32940       return result;
32941     }
32942 
32943     private int memoizedHashCode = 0;
32944     @java.lang.Override
32945     public int hashCode() {
32946       if (memoizedHashCode != 0) {
32947         return memoizedHashCode;
32948       }
32949       int hash = 41;
32950       hash = (19 * hash) + getDescriptorForType().hashCode();
32951       if (getRegionActionResultCount() > 0) {
32952         hash = (37 * hash) + REGIONACTIONRESULT_FIELD_NUMBER;
32953         hash = (53 * hash) + getRegionActionResultList().hashCode();
32954       }
32955       if (hasProcessed()) {
32956         hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
32957         hash = (53 * hash) + hashBoolean(getProcessed());
32958       }
32959       hash = (29 * hash) + getUnknownFields().hashCode();
32960       memoizedHashCode = hash;
32961       return hash;
32962     }
32963 
32964     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
32965         com.google.protobuf.ByteString data)
32966         throws com.google.protobuf.InvalidProtocolBufferException {
32967       return PARSER.parseFrom(data);
32968     }
32969     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
32970         com.google.protobuf.ByteString data,
32971         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32972         throws com.google.protobuf.InvalidProtocolBufferException {
32973       return PARSER.parseFrom(data, extensionRegistry);
32974     }
32975     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(byte[] data)
32976         throws com.google.protobuf.InvalidProtocolBufferException {
32977       return PARSER.parseFrom(data);
32978     }
32979     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
32980         byte[] data,
32981         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32982         throws com.google.protobuf.InvalidProtocolBufferException {
32983       return PARSER.parseFrom(data, extensionRegistry);
32984     }
32985     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(java.io.InputStream input)
32986         throws java.io.IOException {
32987       return PARSER.parseFrom(input);
32988     }
32989     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
32990         java.io.InputStream input,
32991         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
32992         throws java.io.IOException {
32993       return PARSER.parseFrom(input, extensionRegistry);
32994     }
32995     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input)
32996         throws java.io.IOException {
32997       return PARSER.parseDelimitedFrom(input);
32998     }
32999     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseDelimitedFrom(
33000         java.io.InputStream input,
33001         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33002         throws java.io.IOException {
33003       return PARSER.parseDelimitedFrom(input, extensionRegistry);
33004     }
33005     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
33006         com.google.protobuf.CodedInputStream input)
33007         throws java.io.IOException {
33008       return PARSER.parseFrom(input);
33009     }
33010     public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parseFrom(
33011         com.google.protobuf.CodedInputStream input,
33012         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33013         throws java.io.IOException {
33014       return PARSER.parseFrom(input, extensionRegistry);
33015     }
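    // Editorial note (not generated): a minimal sketch of reading a response, assuming `in` is a
    // hypothetical InputStream positioned at a serialized MultiResponse:
    //
    //   ClientProtos.MultiResponse response = ClientProtos.MultiResponse.parseFrom(in);
    //   int results = response.getRegionActionResultCount();
    //   boolean processed = response.hasProcessed() && response.getProcessed();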
33016 
33017     public static Builder newBuilder() { return Builder.create(); }
33018     public Builder newBuilderForType() { return newBuilder(); }
33019     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse prototype) {
33020       return newBuilder().mergeFrom(prototype);
33021     }
33022     public Builder toBuilder() { return newBuilder(this); }
33023 
33024     @java.lang.Override
33025     protected Builder newBuilderForType(
33026         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
33027       Builder builder = new Builder(parent);
33028       return builder;
33029     }
33030     /**
33031      * Protobuf type {@code MultiResponse}
33032      */
33033     public static final class Builder extends
33034         com.google.protobuf.GeneratedMessage.Builder<Builder>
33035        implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponseOrBuilder {
33036       public static final com.google.protobuf.Descriptors.Descriptor
33037           getDescriptor() {
33038         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor;
33039       }
33040 
33041       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
33042           internalGetFieldAccessorTable() {
33043         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_fieldAccessorTable
33044             .ensureFieldAccessorsInitialized(
33045                 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.Builder.class);
33046       }
33047 
33048       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.newBuilder()
33049       private Builder() {
33050         maybeForceBuilderInitialization();
33051       }
33052 
33053       private Builder(
33054           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
33055         super(parent);
33056         maybeForceBuilderInitialization();
33057       }
33058       private void maybeForceBuilderInitialization() {
33059         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
33060           getRegionActionResultFieldBuilder();
33061         }
33062       }
33063       private static Builder create() {
33064         return new Builder();
33065       }
33066 
33067       public Builder clear() {
33068         super.clear();
33069         if (regionActionResultBuilder_ == null) {
33070           regionActionResult_ = java.util.Collections.emptyList();
33071           bitField0_ = (bitField0_ & ~0x00000001);
33072         } else {
33073           regionActionResultBuilder_.clear();
33074         }
33075         processed_ = false;
33076         bitField0_ = (bitField0_ & ~0x00000002);
33077         return this;
33078       }
33079 
33080       public Builder clone() {
33081         return create().mergeFrom(buildPartial());
33082       }
33083 
33084       public com.google.protobuf.Descriptors.Descriptor
33085           getDescriptorForType() {
33086         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MultiResponse_descriptor;
33087       }
33088 
getDefaultInstanceForType()33089       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse getDefaultInstanceForType() {
33090         return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
33091       }
33092 
build()33093       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse build() {
33094         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = buildPartial();
33095         if (!result.isInitialized()) {
33096           throw newUninitializedMessageException(result);
33097         }
33098         return result;
33099       }
33100 
buildPartial()33101       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse buildPartial() {
33102         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse(this);
33103         int from_bitField0_ = bitField0_;
33104         int to_bitField0_ = 0;
33105         if (regionActionResultBuilder_ == null) {
33106           if (((bitField0_ & 0x00000001) == 0x00000001)) {
33107             regionActionResult_ = java.util.Collections.unmodifiableList(regionActionResult_);
33108             bitField0_ = (bitField0_ & ~0x00000001);
33109           }
33110           result.regionActionResult_ = regionActionResult_;
33111         } else {
33112           result.regionActionResult_ = regionActionResultBuilder_.build();
33113         }
33114         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
33115           to_bitField0_ |= 0x00000001;
33116         }
33117         result.processed_ = processed_;
33118         result.bitField0_ = to_bitField0_;
33119         onBuilt();
33120         return result;
33121       }
33122 
mergeFrom(com.google.protobuf.Message other)33123       public Builder mergeFrom(com.google.protobuf.Message other) {
33124         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) {
33125           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse)other);
33126         } else {
33127           super.mergeFrom(other);
33128           return this;
33129         }
33130       }
33131 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other)33132       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse other) {
33133         if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()) return this;
33134         if (regionActionResultBuilder_ == null) {
33135           if (!other.regionActionResult_.isEmpty()) {
33136             if (regionActionResult_.isEmpty()) {
33137               regionActionResult_ = other.regionActionResult_;
33138               bitField0_ = (bitField0_ & ~0x00000001);
33139             } else {
33140               ensureRegionActionResultIsMutable();
33141               regionActionResult_.addAll(other.regionActionResult_);
33142             }
33143             onChanged();
33144           }
33145         } else {
33146           if (!other.regionActionResult_.isEmpty()) {
33147             if (regionActionResultBuilder_.isEmpty()) {
33148               regionActionResultBuilder_.dispose();
33149               regionActionResultBuilder_ = null;
33150               regionActionResult_ = other.regionActionResult_;
33151               bitField0_ = (bitField0_ & ~0x00000001);
33152               regionActionResultBuilder_ =
33153                 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
33154                    getRegionActionResultFieldBuilder() : null;
33155             } else {
33156               regionActionResultBuilder_.addAllMessages(other.regionActionResult_);
33157             }
33158           }
33159         }
33160         if (other.hasProcessed()) {
33161           setProcessed(other.getProcessed());
33162         }
33163         this.mergeUnknownFields(other.getUnknownFields());
33164         return this;
33165       }
33166 
isInitialized()33167       public final boolean isInitialized() {
33168         for (int i = 0; i < getRegionActionResultCount(); i++) {
33169           if (!getRegionActionResult(i).isInitialized()) {
33170 
33171             return false;
33172           }
33173         }
33174         return true;
33175       }
33176 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)33177       public Builder mergeFrom(
33178           com.google.protobuf.CodedInputStream input,
33179           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
33180           throws java.io.IOException {
33181         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse parsedMessage = null;
33182         try {
33183           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
33184         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
33185           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) e.getUnfinishedMessage();
33186           throw e;
33187         } finally {
33188           if (parsedMessage != null) {
33189             mergeFrom(parsedMessage);
33190           }
33191         }
33192         return this;
33193       }
33194       private int bitField0_;
33195 
33196       // repeated .RegionActionResult regionActionResult = 1;
33197       private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> regionActionResult_ =
33198         java.util.Collections.emptyList();
ensureRegionActionResultIsMutable()33199       private void ensureRegionActionResultIsMutable() {
33200         if (!((bitField0_ & 0x00000001) == 0x00000001)) {
33201           regionActionResult_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult>(regionActionResult_);
33202           bitField0_ |= 0x00000001;
33203          }
33204       }
33205 
33206       private com.google.protobuf.RepeatedFieldBuilder<
33207           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder> regionActionResultBuilder_;
33208 
33209       /**
33210        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33211        */
getRegionActionResultList()33212       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> getRegionActionResultList() {
33213         if (regionActionResultBuilder_ == null) {
33214           return java.util.Collections.unmodifiableList(regionActionResult_);
33215         } else {
33216           return regionActionResultBuilder_.getMessageList();
33217         }
33218       }
33219       /**
33220        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33221        */
getRegionActionResultCount()33222       public int getRegionActionResultCount() {
33223         if (regionActionResultBuilder_ == null) {
33224           return regionActionResult_.size();
33225         } else {
33226           return regionActionResultBuilder_.getCount();
33227         }
33228       }
33229       /**
33230        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33231        */
getRegionActionResult(int index)33232       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult getRegionActionResult(int index) {
33233         if (regionActionResultBuilder_ == null) {
33234           return regionActionResult_.get(index);
33235         } else {
33236           return regionActionResultBuilder_.getMessage(index);
33237         }
33238       }
33239       /**
33240        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33241        */
setRegionActionResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value)33242       public Builder setRegionActionResult(
33243           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
33244         if (regionActionResultBuilder_ == null) {
33245           if (value == null) {
33246             throw new NullPointerException();
33247           }
33248           ensureRegionActionResultIsMutable();
33249           regionActionResult_.set(index, value);
33250           onChanged();
33251         } else {
33252           regionActionResultBuilder_.setMessage(index, value);
33253         }
33254         return this;
33255       }
33256       /**
33257        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33258        */
setRegionActionResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue)33259       public Builder setRegionActionResult(
33260           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
33261         if (regionActionResultBuilder_ == null) {
33262           ensureRegionActionResultIsMutable();
33263           regionActionResult_.set(index, builderForValue.build());
33264           onChanged();
33265         } else {
33266           regionActionResultBuilder_.setMessage(index, builderForValue.build());
33267         }
33268         return this;
33269       }
33270       /**
33271        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33272        */
addRegionActionResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value)33273       public Builder addRegionActionResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
33274         if (regionActionResultBuilder_ == null) {
33275           if (value == null) {
33276             throw new NullPointerException();
33277           }
33278           ensureRegionActionResultIsMutable();
33279           regionActionResult_.add(value);
33280           onChanged();
33281         } else {
33282           regionActionResultBuilder_.addMessage(value);
33283         }
33284         return this;
33285       }
33286       /**
33287        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33288        */
addRegionActionResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value)33289       public Builder addRegionActionResult(
33290           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult value) {
33291         if (regionActionResultBuilder_ == null) {
33292           if (value == null) {
33293             throw new NullPointerException();
33294           }
33295           ensureRegionActionResultIsMutable();
33296           regionActionResult_.add(index, value);
33297           onChanged();
33298         } else {
33299           regionActionResultBuilder_.addMessage(index, value);
33300         }
33301         return this;
33302       }
33303       /**
33304        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33305        */
addRegionActionResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue)33306       public Builder addRegionActionResult(
33307           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
33308         if (regionActionResultBuilder_ == null) {
33309           ensureRegionActionResultIsMutable();
33310           regionActionResult_.add(builderForValue.build());
33311           onChanged();
33312         } else {
33313           regionActionResultBuilder_.addMessage(builderForValue.build());
33314         }
33315         return this;
33316       }
33317       /**
33318        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33319        */
addRegionActionResult( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue)33320       public Builder addRegionActionResult(
33321           int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder builderForValue) {
33322         if (regionActionResultBuilder_ == null) {
33323           ensureRegionActionResultIsMutable();
33324           regionActionResult_.add(index, builderForValue.build());
33325           onChanged();
33326         } else {
33327           regionActionResultBuilder_.addMessage(index, builderForValue.build());
33328         }
33329         return this;
33330       }
33331       /**
33332        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33333        */
addAllRegionActionResult( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> values)33334       public Builder addAllRegionActionResult(
33335           java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult> values) {
33336         if (regionActionResultBuilder_ == null) {
33337           ensureRegionActionResultIsMutable();
33338           super.addAll(values, regionActionResult_);
33339           onChanged();
33340         } else {
33341           regionActionResultBuilder_.addAllMessages(values);
33342         }
33343         return this;
33344       }
33345       /**
33346        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33347        */
clearRegionActionResult()33348       public Builder clearRegionActionResult() {
33349         if (regionActionResultBuilder_ == null) {
33350           regionActionResult_ = java.util.Collections.emptyList();
33351           bitField0_ = (bitField0_ & ~0x00000001);
33352           onChanged();
33353         } else {
33354           regionActionResultBuilder_.clear();
33355         }
33356         return this;
33357       }
33358       /**
33359        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33360        */
removeRegionActionResult(int index)33361       public Builder removeRegionActionResult(int index) {
33362         if (regionActionResultBuilder_ == null) {
33363           ensureRegionActionResultIsMutable();
33364           regionActionResult_.remove(index);
33365           onChanged();
33366         } else {
33367           regionActionResultBuilder_.remove(index);
33368         }
33369         return this;
33370       }
33371       /**
33372        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33373        */
getRegionActionResultBuilder( int index)33374       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder getRegionActionResultBuilder(
33375           int index) {
33376         return getRegionActionResultFieldBuilder().getBuilder(index);
33377       }
33378       /**
33379        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33380        */
getRegionActionResultOrBuilder( int index)33381       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder getRegionActionResultOrBuilder(
33382           int index) {
33383         if (regionActionResultBuilder_ == null) {
33384           return regionActionResult_.get(index);  } else {
33385           return regionActionResultBuilder_.getMessageOrBuilder(index);
33386         }
33387       }
33388       /**
33389        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33390        */
33391       public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
getRegionActionResultOrBuilderList()33392            getRegionActionResultOrBuilderList() {
33393         if (regionActionResultBuilder_ != null) {
33394           return regionActionResultBuilder_.getMessageOrBuilderList();
33395         } else {
33396           return java.util.Collections.unmodifiableList(regionActionResult_);
33397         }
33398       }
33399       /**
33400        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33401        */
addRegionActionResultBuilder()33402       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder() {
33403         return getRegionActionResultFieldBuilder().addBuilder(
33404             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
33405       }
33406       /**
33407        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33408        */
addRegionActionResultBuilder( int index)33409       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder addRegionActionResultBuilder(
33410           int index) {
33411         return getRegionActionResultFieldBuilder().addBuilder(
33412             index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.getDefaultInstance());
33413       }
33414       /**
33415        * <code>repeated .RegionActionResult regionActionResult = 1;</code>
33416        */
33417       public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder>
getRegionActionResultBuilderList()33418            getRegionActionResultBuilderList() {
33419         return getRegionActionResultFieldBuilder().getBuilderList();
33420       }
33421       private com.google.protobuf.RepeatedFieldBuilder<
33422           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>
getRegionActionResultFieldBuilder()33423           getRegionActionResultFieldBuilder() {
33424         if (regionActionResultBuilder_ == null) {
33425           regionActionResultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
33426               org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResult.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionActionResultOrBuilder>(
33427                   regionActionResult_,
33428                   ((bitField0_ & 0x00000001) == 0x00000001),
33429                   getParentForChildren(),
33430                   isClean());
33431           regionActionResult_ = null;
33432         }
33433         return regionActionResultBuilder_;
33434       }
33435 
33436       // optional bool processed = 2;
33437       private boolean processed_ ;
33438       /**
33439        * <code>optional bool processed = 2;</code>
33440        *
33441        * <pre>
33442        * used for mutate to indicate processed only
33443        * </pre>
33444        */
hasProcessed()33445       public boolean hasProcessed() {
33446         return ((bitField0_ & 0x00000002) == 0x00000002);
33447       }
33448       /**
33449        * <code>optional bool processed = 2;</code>
33450        *
33451        * <pre>
33452        * used for mutate to indicate processed only
33453        * </pre>
33454        */
getProcessed()33455       public boolean getProcessed() {
33456         return processed_;
33457       }
33458       /**
33459        * <code>optional bool processed = 2;</code>
33460        *
33461        * <pre>
33462        * used for mutate to indicate processed only
33463        * </pre>
33464        */
setProcessed(boolean value)33465       public Builder setProcessed(boolean value) {
33466         bitField0_ |= 0x00000002;
33467         processed_ = value;
33468         onChanged();
33469         return this;
33470       }
33471       /**
33472        * <code>optional bool processed = 2;</code>
33473        *
33474        * <pre>
33475        * used for mutate to indicate processed only
33476        * </pre>
33477        */
clearProcessed()33478       public Builder clearProcessed() {
33479         bitField0_ = (bitField0_ & ~0x00000002);
33480         processed_ = false;
33481         onChanged();
33482         return this;
33483       }
33484 
33485       // @@protoc_insertion_point(builder_scope:MultiResponse)
33486     }
33487 
33488     static {
33489       defaultInstance = new MultiResponse(true);
defaultInstance.initFields()33490       defaultInstance.initFields();
33491     }
33492 
33493     // @@protoc_insertion_point(class_scope:MultiResponse)
33494   }
33495 
33496   /**
33497    * Protobuf service {@code ClientService}
33498    */
33499   public static abstract class ClientService
33500       implements com.google.protobuf.Service {
ClientService()33501     protected ClientService() {}
33502 
33503     public interface Interface {
33504       /**
33505        * <code>rpc Get(.GetRequest) returns (.GetResponse);</code>
33506        */
get( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done)33507       public abstract void get(
33508           com.google.protobuf.RpcController controller,
33509           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
33510           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
33511 
33512       /**
33513        * <code>rpc Mutate(.MutateRequest) returns (.MutateResponse);</code>
33514        */
mutate( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done)33515       public abstract void mutate(
33516           com.google.protobuf.RpcController controller,
33517           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
33518           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
33519 
33520       /**
33521        * <code>rpc Scan(.ScanRequest) returns (.ScanResponse);</code>
33522        */
scan( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done)33523       public abstract void scan(
33524           com.google.protobuf.RpcController controller,
33525           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
33526           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
33527 
33528       /**
33529        * <code>rpc BulkLoadHFile(.BulkLoadHFileRequest) returns (.BulkLoadHFileResponse);</code>
33530        */
bulkLoadHFile( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done)33531       public abstract void bulkLoadHFile(
33532           com.google.protobuf.RpcController controller,
33533           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
33534           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
33535 
33536       /**
33537        * <code>rpc ExecService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
33538        */
execService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done)33539       public abstract void execService(
33540           com.google.protobuf.RpcController controller,
33541           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33542           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
33543 
33544       /**
33545        * <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
33546        */
execRegionServerService( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done)33547       public abstract void execRegionServerService(
33548           com.google.protobuf.RpcController controller,
33549           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33550           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
33551 
33552       /**
33553        * <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
33554        */
multi( com.google.protobuf.RpcController controller, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request, com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done)33555       public abstract void multi(
33556           com.google.protobuf.RpcController controller,
33557           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
33558           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
33559 
33560     }
33561 
newReflectiveService( final Interface impl)33562     public static com.google.protobuf.Service newReflectiveService(
33563         final Interface impl) {
33564       return new ClientService() {
33565         @java.lang.Override
33566         public  void get(
33567             com.google.protobuf.RpcController controller,
33568             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
33569             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
33570           impl.get(controller, request, done);
33571         }
33572 
33573         @java.lang.Override
33574         public  void mutate(
33575             com.google.protobuf.RpcController controller,
33576             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
33577             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
33578           impl.mutate(controller, request, done);
33579         }
33580 
33581         @java.lang.Override
33582         public  void scan(
33583             com.google.protobuf.RpcController controller,
33584             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
33585             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
33586           impl.scan(controller, request, done);
33587         }
33588 
33589         @java.lang.Override
33590         public  void bulkLoadHFile(
33591             com.google.protobuf.RpcController controller,
33592             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
33593             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
33594           impl.bulkLoadHFile(controller, request, done);
33595         }
33596 
33597         @java.lang.Override
33598         public  void execService(
33599             com.google.protobuf.RpcController controller,
33600             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33601             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
33602           impl.execService(controller, request, done);
33603         }
33604 
33605         @java.lang.Override
33606         public  void execRegionServerService(
33607             com.google.protobuf.RpcController controller,
33608             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33609             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
33610           impl.execRegionServerService(controller, request, done);
33611         }
33612 
33613         @java.lang.Override
33614         public  void multi(
33615             com.google.protobuf.RpcController controller,
33616             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
33617             com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
33618           impl.multi(controller, request, done);
33619         }
33620 
33621       };
33622     }
33623 
33624     public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl)33625         newReflectiveBlockingService(final BlockingInterface impl) {
33626       return new com.google.protobuf.BlockingService() {
33627         public final com.google.protobuf.Descriptors.ServiceDescriptor
33628             getDescriptorForType() {
33629           return getDescriptor();
33630         }
33631 
33632         public final com.google.protobuf.Message callBlockingMethod(
33633             com.google.protobuf.Descriptors.MethodDescriptor method,
33634             com.google.protobuf.RpcController controller,
33635             com.google.protobuf.Message request)
33636             throws com.google.protobuf.ServiceException {
33637           if (method.getService() != getDescriptor()) {
33638             throw new java.lang.IllegalArgumentException(
33639               "Service.callBlockingMethod() given method descriptor for " +
33640               "wrong service type.");
33641           }
33642           switch(method.getIndex()) {
33643             case 0:
33644               return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request);
33645             case 1:
33646               return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request);
33647             case 2:
33648               return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request);
33649             case 3:
33650               return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request);
33651             case 4:
33652               return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
33653             case 5:
33654               return impl.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
33655             case 6:
33656               return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
33657             default:
33658               throw new java.lang.AssertionError("Can't get here.");
33659           }
33660         }
33661 
33662         public final com.google.protobuf.Message
33663             getRequestPrototype(
33664             com.google.protobuf.Descriptors.MethodDescriptor method) {
33665           if (method.getService() != getDescriptor()) {
33666             throw new java.lang.IllegalArgumentException(
33667               "Service.getRequestPrototype() given method " +
33668               "descriptor for wrong service type.");
33669           }
33670           switch(method.getIndex()) {
33671             case 0:
33672               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
33673             case 1:
33674               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
33675             case 2:
33676               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
33677             case 3:
33678               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
33679             case 4:
33680               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
33681             case 5:
33682               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
33683             case 6:
33684               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
33685             default:
33686               throw new java.lang.AssertionError("Can't get here.");
33687           }
33688         }
33689 
33690         public final com.google.protobuf.Message
33691             getResponsePrototype(
33692             com.google.protobuf.Descriptors.MethodDescriptor method) {
33693           if (method.getService() != getDescriptor()) {
33694             throw new java.lang.IllegalArgumentException(
33695               "Service.getResponsePrototype() given method " +
33696               "descriptor for wrong service type.");
33697           }
33698           switch(method.getIndex()) {
33699             case 0:
33700               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
33701             case 1:
33702               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
33703             case 2:
33704               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
33705             case 3:
33706               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
33707             case 4:
33708               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
33709             case 5:
33710               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
33711             case 6:
33712               return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
33713             default:
33714               throw new java.lang.AssertionError("Can't get here.");
33715           }
33716         }
33717 
33718       };
33719     }
33720 
33721     /**
33722      * <code>rpc Get(.GetRequest) returns (.GetResponse);</code>
33723      */
33724     public abstract void get(
33725         com.google.protobuf.RpcController controller,
33726         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
33727         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done);
33728 
33729     /**
33730      * <code>rpc Mutate(.MutateRequest) returns (.MutateResponse);</code>
33731      */
33732     public abstract void mutate(
33733         com.google.protobuf.RpcController controller,
33734         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
33735         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done);
33736 
33737     /**
33738      * <code>rpc Scan(.ScanRequest) returns (.ScanResponse);</code>
33739      */
33740     public abstract void scan(
33741         com.google.protobuf.RpcController controller,
33742         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
33743         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done);
33744 
33745     /**
33746      * <code>rpc BulkLoadHFile(.BulkLoadHFileRequest) returns (.BulkLoadHFileResponse);</code>
33747      */
33748     public abstract void bulkLoadHFile(
33749         com.google.protobuf.RpcController controller,
33750         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
33751         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
33752 
33753     /**
33754      * <code>rpc ExecService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
33755      */
33756     public abstract void execService(
33757         com.google.protobuf.RpcController controller,
33758         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33759         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
33760 
33761     /**
33762      * <code>rpc ExecRegionServerService(.CoprocessorServiceRequest) returns (.CoprocessorServiceResponse);</code>
33763      */
33764     public abstract void execRegionServerService(
33765         com.google.protobuf.RpcController controller,
33766         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33767         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done);
33768 
33769     /**
33770      * <code>rpc Multi(.MultiRequest) returns (.MultiResponse);</code>
33771      */
33772     public abstract void multi(
33773         com.google.protobuf.RpcController controller,
33774         org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
33775         com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done);
33776 
33777     public static final
33778         com.google.protobuf.Descriptors.ServiceDescriptor
33779         getDescriptor() {
33780       return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getServices().get(0);
33781     }
33782     public final com.google.protobuf.Descriptors.ServiceDescriptor
33783         getDescriptorForType() {
33784       return getDescriptor();
33785     }
33786 
33787     public final void callMethod(
33788         com.google.protobuf.Descriptors.MethodDescriptor method,
33789         com.google.protobuf.RpcController controller,
33790         com.google.protobuf.Message request,
33791         com.google.protobuf.RpcCallback<
33792           com.google.protobuf.Message> done) {
33793       if (method.getService() != getDescriptor()) {
33794         throw new java.lang.IllegalArgumentException(
33795           "Service.callMethod() given method descriptor for wrong " +
33796           "service type.");
33797       }
33798       switch(method.getIndex()) {
33799         case 0:
33800           this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)request,
33801             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse>specializeCallback(
33802               done));
33803           return;
33804         case 1:
33805           this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)request,
33806             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse>specializeCallback(
33807               done));
33808           return;
33809         case 2:
33810           this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)request,
33811             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse>specializeCallback(
33812               done));
33813           return;
33814         case 3:
33815           this.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request,
33816             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse>specializeCallback(
33817               done));
33818           return;
33819         case 4:
33820           this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
33821             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
33822               done));
33823           return;
33824         case 5:
33825           this.execRegionServerService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
33826             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse>specializeCallback(
33827               done));
33828           return;
33829         case 6:
33830           this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
33831             com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse>specializeCallback(
33832               done));
33833           return;
33834         default:
33835           throw new java.lang.AssertionError("Can't get here.");
33836       }
33837     }
33838 
33839     public final com.google.protobuf.Message
33840         getRequestPrototype(
33841         com.google.protobuf.Descriptors.MethodDescriptor method) {
33842       if (method.getService() != getDescriptor()) {
33843         throw new java.lang.IllegalArgumentException(
33844           "Service.getRequestPrototype() given method " +
33845           "descriptor for wrong service type.");
33846       }
33847       switch(method.getIndex()) {
33848         case 0:
33849           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
33850         case 1:
33851           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance();
33852         case 2:
33853           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
33854         case 3:
33855           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
33856         case 4:
33857           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
33858         case 5:
33859           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
33860         case 6:
33861           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
33862         default:
33863           throw new java.lang.AssertionError("Can't get here.");
33864       }
33865     }
33866 
33867     public final com.google.protobuf.Message
33868         getResponsePrototype(
33869         com.google.protobuf.Descriptors.MethodDescriptor method) {
33870       if (method.getService() != getDescriptor()) {
33871         throw new java.lang.IllegalArgumentException(
33872           "Service.getResponsePrototype() given method " +
33873           "descriptor for wrong service type.");
33874       }
33875       switch(method.getIndex()) {
33876         case 0:
33877           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance();
33878         case 1:
33879           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
33880         case 2:
33881           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance();
33882         case 3:
33883           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
33884         case 4:
33885           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
33886         case 5:
33887           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
33888         case 6:
33889           return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
33890         default:
33891           throw new java.lang.AssertionError("Can't get here.");
33892       }
33893     }
33894 
33895     public static Stub newStub(
33896         com.google.protobuf.RpcChannel channel) {
33897       return new Stub(channel);
33898     }
33899 
33900     public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService implements Interface {
33901       private Stub(com.google.protobuf.RpcChannel channel) {
33902         this.channel = channel;
33903       }
33904 
33905       private final com.google.protobuf.RpcChannel channel;
33906 
33907       public com.google.protobuf.RpcChannel getChannel() {
33908         return channel;
33909       }
33910 
33911       public  void get(
33912           com.google.protobuf.RpcController controller,
33913           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request,
33914           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse> done) {
33915         channel.callMethod(
33916           getDescriptor().getMethods().get(0),
33917           controller,
33918           request,
33919           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(),
33920           com.google.protobuf.RpcUtil.generalizeCallback(
33921             done,
33922             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class,
33923             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()));
33924       }
33925 
33926       public  void mutate(
33927           com.google.protobuf.RpcController controller,
33928           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request,
33929           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse> done) {
33930         channel.callMethod(
33931           getDescriptor().getMethods().get(1),
33932           controller,
33933           request,
33934           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance(),
33935           com.google.protobuf.RpcUtil.generalizeCallback(
33936             done,
33937             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class,
33938             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()));
33939       }
33940 
33941       public  void scan(
33942           com.google.protobuf.RpcController controller,
33943           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request,
33944           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse> done) {
33945         channel.callMethod(
33946           getDescriptor().getMethods().get(2),
33947           controller,
33948           request,
33949           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance(),
33950           com.google.protobuf.RpcUtil.generalizeCallback(
33951             done,
33952             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class,
33953             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance()));
33954       }
33955 
33956       public  void bulkLoadHFile(
33957           com.google.protobuf.RpcController controller,
33958           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
33959           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done) {
33960         channel.callMethod(
33961           getDescriptor().getMethods().get(3),
33962           controller,
33963           request,
33964           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance(),
33965           com.google.protobuf.RpcUtil.generalizeCallback(
33966             done,
33967             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class,
33968             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()));
33969       }
33970 
33971       public  void execService(
33972           com.google.protobuf.RpcController controller,
33973           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33974           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
33975         channel.callMethod(
33976           getDescriptor().getMethods().get(4),
33977           controller,
33978           request,
33979           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
33980           com.google.protobuf.RpcUtil.generalizeCallback(
33981             done,
33982             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
33983             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
33984       }
33985 
33986       public  void execRegionServerService(
33987           com.google.protobuf.RpcController controller,
33988           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
33989           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
33990         channel.callMethod(
33991           getDescriptor().getMethods().get(5),
33992           controller,
33993           request,
33994           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
33995           com.google.protobuf.RpcUtil.generalizeCallback(
33996             done,
33997             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
33998             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance()));
33999       }
34000 
34001       public  void multi(
34002           com.google.protobuf.RpcController controller,
34003           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
34004           com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
34005         channel.callMethod(
34006           getDescriptor().getMethods().get(6),
34007           controller,
34008           request,
34009           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
34010           com.google.protobuf.RpcUtil.generalizeCallback(
34011             done,
34012             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.class,
34013             org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance()));
34014       }
34015     }
34016 
34017     public static BlockingInterface newBlockingStub(
34018         com.google.protobuf.BlockingRpcChannel channel) {
34019       return new BlockingStub(channel);
34020     }
34021 
34022     public interface BlockingInterface {
34023       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
34024           com.google.protobuf.RpcController controller,
34025           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
34026           throws com.google.protobuf.ServiceException;
34027 
34028       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
34029           com.google.protobuf.RpcController controller,
34030           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
34031           throws com.google.protobuf.ServiceException;
34032 
34033       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
34034           com.google.protobuf.RpcController controller,
34035           org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
34036           throws com.google.protobuf.ServiceException;
34037 
34038       public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse get(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse mutate(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse scan(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse bulkLoadHFile(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execRegionServerService(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
      }


      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse multi(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
      }

    }
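    /*
     * Illustrative sketch (not emitted by protoc): every BlockingStub method
     * above forwards to channel.callBlockingMethod with the matching
     * MethodDescriptor (indexes 0-6 of ClientService) and the response
     * prototype, so the stub itself carries no per-method logic. Assuming a
     * caller already holds a com.google.protobuf.BlockingRpcChannel in a
     * hypothetical variable "channel", a blocking Get could look roughly
     * like this (the null controller and the regionSpecifier value are
     * placeholders for whatever the surrounding RPC layer supplies):
     *
     *   ClientProtos.ClientService.BlockingInterface stub =
     *       ClientProtos.ClientService.newBlockingStub(channel);
     *   ClientProtos.GetRequest request = ClientProtos.GetRequest.newBuilder()
     *       .setRegion(regionSpecifier)  // hypothetical RegionSpecifier built elsewhere
     *       .setGet(ClientProtos.Get.newBuilder()
     *           .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1")))
     *       .build();
     *   ClientProtos.GetResponse response = stub.get(null, request);
     *
     * Any RPC failure surfaces as com.google.protobuf.ServiceException.
     */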

    // @@protoc_insertion_point(class_scope:ClientService)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Authorizations_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Authorizations_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CellVisibility_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CellVisibility_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Column_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Column_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Get_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Get_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Result_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Result_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Condition_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Condition_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutationProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutationProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutationProto_ColumnValue_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutationProto_ColumnValue_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutationProto_ColumnValue_QualifierValue_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutateRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutateRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MutateResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MutateResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Scan_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Scan_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ScanRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ScanRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ScanResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ScanResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadHFileRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadHFileRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadHFileRequest_FamilyPath_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_BulkLoadHFileResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_BulkLoadHFileResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceCall_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceCall_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceResult_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceResult_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_CoprocessorServiceResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_CoprocessorServiceResponse_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Action_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Action_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionAction_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionAction_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionLoadStats_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionLoadStats_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ResultOrException_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ResultOrException_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionActionResult_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionActionResult_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MultiRequest_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MultiRequest_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MultiResponse_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MultiResponse_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\014Client.proto\032\013HBase.proto\032\014Filter.prot" +
      "o\032\nCell.proto\032\020Comparator.proto\032\017MapRedu" +
      "ce.proto\"\037\n\016Authorizations\022\r\n\005label\030\001 \003(" +
      "\t\"$\n\016CellVisibility\022\022\n\nexpression\030\001 \002(\t\"" +
      "+\n\006Column\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002" +
      " \003(\014\"\203\003\n\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(" +
      "\0132\007.Column\022!\n\tattribute\030\003 \003(\0132\016.NameByte" +
      "sPair\022\027\n\006filter\030\004 \001(\0132\007.Filter\022\036\n\ntime_r" +
      "ange\030\005 \001(\0132\n.TimeRange\022\027\n\014max_versions\030\006" +
      " \001(\r:\0011\022\032\n\014cache_blocks\030\007 \001(\010:\004true\022\023\n\013s",
      "tore_limit\030\010 \001(\r\022\024\n\014store_offset\030\t \001(\r\022\035" +
      "\n\016existence_only\030\n \001(\010:\005false\022!\n\022closest" +
      "_row_before\030\013 \001(\010:\005false\022)\n\013consistency\030" +
      "\014 \001(\0162\014.Consistency:\006STRONG\022-\n\rcf_time_r" +
      "ange\030\r \003(\0132\026.ColumnFamilyTimeRange\"z\n\006Re" +
      "sult\022\023\n\004cell\030\001 \003(\0132\005.Cell\022\035\n\025associated_" +
      "cell_count\030\002 \001(\005\022\016\n\006exists\030\003 \001(\010\022\024\n\005stal" +
      "e\030\004 \001(\010:\005false\022\026\n\007partial\030\005 \001(\010:\005false\"A" +
      "\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpe" +
      "cifier\022\021\n\003get\030\002 \002(\0132\004.Get\"&\n\013GetResponse",
      "\022\027\n\006result\030\001 \001(\0132\007.Result\"\200\001\n\tCondition\022" +
      "\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifie" +
      "r\030\003 \002(\014\022\"\n\014compare_type\030\004 \002(\0162\014.CompareT" +
      "ype\022\037\n\ncomparator\030\005 \002(\0132\013.Comparator\"\265\006\n" +
      "\rMutationProto\022\013\n\003row\030\001 \001(\014\0220\n\013mutate_ty" +
      "pe\030\002 \001(\0162\033.MutationProto.MutationType\0220\n" +
      "\014column_value\030\003 \003(\0132\032.MutationProto.Colu" +
      "mnValue\022\021\n\ttimestamp\030\004 \001(\004\022!\n\tattribute\030" +
      "\005 \003(\0132\016.NameBytesPair\022:\n\ndurability\030\006 \001(" +
      "\0162\031.MutationProto.Durability:\013USE_DEFAUL",
      "T\022\036\n\ntime_range\030\007 \001(\0132\n.TimeRange\022\035\n\025ass" +
      "ociated_cell_count\030\010 \001(\005\022\r\n\005nonce\030\t \001(\004\032" +
      "\347\001\n\013ColumnValue\022\016\n\006family\030\001 \002(\014\022B\n\017quali" +
      "fier_value\030\002 \003(\0132).MutationProto.ColumnV" +
      "alue.QualifierValue\032\203\001\n\016QualifierValue\022\021" +
      "\n\tqualifier\030\001 \001(\014\022\r\n\005value\030\002 \001(\014\022\021\n\ttime" +
      "stamp\030\003 \001(\004\022.\n\013delete_type\030\004 \001(\0162\031.Mutat" +
      "ionProto.DeleteType\022\014\n\004tags\030\005 \001(\014\"W\n\nDur" +
      "ability\022\017\n\013USE_DEFAULT\020\000\022\014\n\010SKIP_WAL\020\001\022\r" +
      "\n\tASYNC_WAL\020\002\022\014\n\010SYNC_WAL\020\003\022\r\n\tFSYNC_WAL",
      "\020\004\">\n\014MutationType\022\n\n\006APPEND\020\000\022\r\n\tINCREM" +
      "ENT\020\001\022\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\"p\n\nDeleteType" +
      "\022\026\n\022DELETE_ONE_VERSION\020\000\022\034\n\030DELETE_MULTI" +
      "PLE_VERSIONS\020\001\022\021\n\rDELETE_FAMILY\020\002\022\031\n\025DEL" +
      "ETE_FAMILY_VERSION\020\003\"\207\001\n\rMutateRequest\022 " +
      "\n\006region\030\001 \002(\0132\020.RegionSpecifier\022 \n\010muta" +
      "tion\030\002 \002(\0132\016.MutationProto\022\035\n\tcondition\030" +
      "\003 \001(\0132\n.Condition\022\023\n\013nonce_group\030\004 \001(\004\"<" +
      "\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132\007.Resul" +
      "t\022\021\n\tprocessed\030\002 \001(\010\"\207\004\n\004Scan\022\027\n\006column\030",
      "\001 \003(\0132\007.Column\022!\n\tattribute\030\002 \003(\0132\016.Name" +
      "BytesPair\022\021\n\tstart_row\030\003 \001(\014\022\020\n\010stop_row" +
      "\030\004 \001(\014\022\027\n\006filter\030\005 \001(\0132\007.Filter\022\036\n\ntime_" +
      "range\030\006 \001(\0132\n.TimeRange\022\027\n\014max_versions\030" +
      "\007 \001(\r:\0011\022\032\n\014cache_blocks\030\010 \001(\010:\004true\022\022\n\n" +
      "batch_size\030\t \001(\r\022\027\n\017max_result_size\030\n \001(" +
      "\004\022\023\n\013store_limit\030\013 \001(\r\022\024\n\014store_offset\030\014" +
      " \001(\r\022&\n\036load_column_families_on_demand\030\r" +
      " \001(\010\022\r\n\005small\030\016 \001(\010\022\027\n\010reversed\030\017 \001(\010:\005f" +
      "alse\022)\n\013consistency\030\020 \001(\0162\014.Consistency:",
      "\006STRONG\022\017\n\007caching\030\021 \001(\r\022\035\n\025allow_partia" +
      "l_results\030\022 \001(\010\022-\n\rcf_time_range\030\023 \003(\0132\026" +
      ".ColumnFamilyTimeRange\"\224\002\n\013ScanRequest\022 " +
      "\n\006region\030\001 \001(\0132\020.RegionSpecifier\022\023\n\004scan" +
      "\030\002 \001(\0132\005.Scan\022\022\n\nscanner_id\030\003 \001(\004\022\026\n\016num" +
      "ber_of_rows\030\004 \001(\r\022\025\n\rclose_scanner\030\005 \001(\010" +
      "\022\025\n\rnext_call_seq\030\006 \001(\004\022\037\n\027client_handle" +
      "s_partials\030\007 \001(\010\022!\n\031client_handles_heart" +
      "beats\030\010 \001(\010\022\032\n\022track_scan_metrics\030\t \001(\010\022" +
      "\024\n\005renew\030\n \001(\010:\005false\"\210\002\n\014ScanResponse\022\030",
      "\n\020cells_per_result\030\001 \003(\r\022\022\n\nscanner_id\030\002" +
      " \001(\004\022\024\n\014more_results\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\022" +
      "\030\n\007results\030\005 \003(\0132\007.Result\022\r\n\005stale\030\006 \001(\010" +
      "\022\037\n\027partial_flag_per_result\030\007 \003(\010\022\036\n\026mor" +
      "e_results_in_region\030\010 \001(\010\022\031\n\021heartbeat_m" +
      "essage\030\t \001(\010\022\"\n\014scan_metrics\030\n \001(\0132\014.Sca" +
      "nMetrics\"\263\001\n\024BulkLoadHFileRequest\022 \n\006reg" +
      "ion\030\001 \002(\0132\020.RegionSpecifier\0225\n\013family_pa" +
      "th\030\002 \003(\0132 .BulkLoadHFileRequest.FamilyPa" +
      "th\022\026\n\016assign_seq_num\030\003 \001(\010\032*\n\nFamilyPath",
      "\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkLo" +
      "adHFileResponse\022\016\n\006loaded\030\001 \002(\010\"a\n\026Copro" +
      "cessorServiceCall\022\013\n\003row\030\001 \002(\014\022\024\n\014servic" +
      "e_name\030\002 \002(\t\022\023\n\013method_name\030\003 \002(\t\022\017\n\007req" +
      "uest\030\004 \002(\014\"9\n\030CoprocessorServiceResult\022\035" +
      "\n\005value\030\001 \001(\0132\016.NameBytesPair\"d\n\031Coproce" +
      "ssorServiceRequest\022 \n\006region\030\001 \002(\0132\020.Reg" +
      "ionSpecifier\022%\n\004call\030\002 \002(\0132\027.Coprocessor" +
      "ServiceCall\"]\n\032CoprocessorServiceRespons" +
      "e\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\035\n\005v",
      "alue\030\002 \002(\0132\016.NameBytesPair\"{\n\006Action\022\r\n\005" +
      "index\030\001 \001(\r\022 \n\010mutation\030\002 \001(\0132\016.Mutation" +
      "Proto\022\021\n\003get\030\003 \001(\0132\004.Get\022-\n\014service_call" +
      "\030\004 \001(\0132\027.CoprocessorServiceCall\"Y\n\014Regio" +
      "nAction\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" +
      "r\022\016\n\006atomic\030\002 \001(\010\022\027\n\006action\030\003 \003(\0132\007.Acti" +
      "on\"c\n\017RegionLoadStats\022\027\n\014memstoreLoad\030\001 " +
      "\001(\005:\0010\022\030\n\rheapOccupancy\030\002 \001(\005:\0010\022\035\n\022comp" +
      "actionPressure\030\003 \001(\005:\0010\"\266\001\n\021ResultOrExce" +
      "ption\022\r\n\005index\030\001 \001(\r\022\027\n\006result\030\002 \001(\0132\007.R",
      "esult\022!\n\texception\030\003 \001(\0132\016.NameBytesPair" +
      "\0221\n\016service_result\030\004 \001(\0132\031.CoprocessorSe" +
      "rviceResult\022#\n\tloadStats\030\005 \001(\0132\020.RegionL" +
      "oadStats\"f\n\022RegionActionResult\022-\n\021result" +
      "OrException\030\001 \003(\0132\022.ResultOrException\022!\n" +
      "\texception\030\002 \001(\0132\016.NameBytesPair\"f\n\014Mult" +
      "iRequest\022#\n\014regionAction\030\001 \003(\0132\r.RegionA" +
      "ction\022\022\n\nnonceGroup\030\002 \001(\004\022\035\n\tcondition\030\003" +
      " \001(\0132\n.Condition\"S\n\rMultiResponse\022/\n\022reg" +
      "ionActionResult\030\001 \003(\0132\023.RegionActionResu",
      "lt\022\021\n\tprocessed\030\002 \001(\010*\'\n\013Consistency\022\n\n\006" +
      "STRONG\020\000\022\014\n\010TIMELINE\020\0012\205\003\n\rClientService" +
      "\022 \n\003Get\022\013.GetRequest\032\014.GetResponse\022)\n\006Mu" +
      "tate\022\016.MutateRequest\032\017.MutateResponse\022#\n" +
      "\004Scan\022\014.ScanRequest\032\r.ScanResponse\022>\n\rBu" +
      "lkLoadHFile\022\025.BulkLoadHFileRequest\032\026.Bul" +
      "kLoadHFileResponse\022F\n\013ExecService\022\032.Copr" +
      "ocessorServiceRequest\032\033.CoprocessorServi" +
      "ceResponse\022R\n\027ExecRegionServerService\022\032." +
      "CoprocessorServiceRequest\032\033.CoprocessorS",
      "erviceResponse\022&\n\005Multi\022\r.MultiRequest\032\016" +
      ".MultiResponseBB\n*org.apache.hadoop.hbas" +
      "e.protobuf.generatedB\014ClientProtosH\001\210\001\001\240" +
      "\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_Authorizations_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_Authorizations_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Authorizations_descriptor,
              new java.lang.String[] { "Label", });
          internal_static_CellVisibility_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_CellVisibility_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CellVisibility_descriptor,
              new java.lang.String[] { "Expression", });
          internal_static_Column_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_Column_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Column_descriptor,
              new java.lang.String[] { "Family", "Qualifier", });
          internal_static_Get_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_Get_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Get_descriptor,
              new java.lang.String[] { "Row", "Column", "Attribute", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "StoreLimit", "StoreOffset", "ExistenceOnly", "ClosestRowBefore", "Consistency", "CfTimeRange", });
          internal_static_Result_descriptor =
            getDescriptor().getMessageTypes().get(4);
          internal_static_Result_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Result_descriptor,
              new java.lang.String[] { "Cell", "AssociatedCellCount", "Exists", "Stale", "Partial", });
          internal_static_GetRequest_descriptor =
            getDescriptor().getMessageTypes().get(5);
          internal_static_GetRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_GetRequest_descriptor,
              new java.lang.String[] { "Region", "Get", });
          internal_static_GetResponse_descriptor =
            getDescriptor().getMessageTypes().get(6);
          internal_static_GetResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_GetResponse_descriptor,
              new java.lang.String[] { "Result", });
          internal_static_Condition_descriptor =
            getDescriptor().getMessageTypes().get(7);
          internal_static_Condition_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Condition_descriptor,
              new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", });
          internal_static_MutationProto_descriptor =
            getDescriptor().getMessageTypes().get(8);
          internal_static_MutationProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MutationProto_descriptor,
              new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Timestamp", "Attribute", "Durability", "TimeRange", "AssociatedCellCount", "Nonce", });
          internal_static_MutationProto_ColumnValue_descriptor =
            internal_static_MutationProto_descriptor.getNestedTypes().get(0);
          internal_static_MutationProto_ColumnValue_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MutationProto_ColumnValue_descriptor,
              new java.lang.String[] { "Family", "QualifierValue", });
          internal_static_MutationProto_ColumnValue_QualifierValue_descriptor =
            internal_static_MutationProto_ColumnValue_descriptor.getNestedTypes().get(0);
          internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MutationProto_ColumnValue_QualifierValue_descriptor,
              new java.lang.String[] { "Qualifier", "Value", "Timestamp", "DeleteType", "Tags", });
          internal_static_MutateRequest_descriptor =
            getDescriptor().getMessageTypes().get(9);
          internal_static_MutateRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MutateRequest_descriptor,
              new java.lang.String[] { "Region", "Mutation", "Condition", "NonceGroup", });
          internal_static_MutateResponse_descriptor =
            getDescriptor().getMessageTypes().get(10);
          internal_static_MutateResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MutateResponse_descriptor,
              new java.lang.String[] { "Result", "Processed", });
          internal_static_Scan_descriptor =
            getDescriptor().getMessageTypes().get(11);
          internal_static_Scan_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Scan_descriptor,
              new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "BatchSize", "MaxResultSize", "StoreLimit", "StoreOffset", "LoadColumnFamiliesOnDemand", "Small", "Reversed", "Consistency", "Caching", "AllowPartialResults", "CfTimeRange", });
          internal_static_ScanRequest_descriptor =
            getDescriptor().getMessageTypes().get(12);
          internal_static_ScanRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ScanRequest_descriptor,
              new java.lang.String[] { "Region", "Scan", "ScannerId", "NumberOfRows", "CloseScanner", "NextCallSeq", "ClientHandlesPartials", "ClientHandlesHeartbeats", "TrackScanMetrics", "Renew", });
          internal_static_ScanResponse_descriptor =
            getDescriptor().getMessageTypes().get(13);
          internal_static_ScanResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ScanResponse_descriptor,
              new java.lang.String[] { "CellsPerResult", "ScannerId", "MoreResults", "Ttl", "Results", "Stale", "PartialFlagPerResult", "MoreResultsInRegion", "HeartbeatMessage", "ScanMetrics", });
          internal_static_BulkLoadHFileRequest_descriptor =
            getDescriptor().getMessageTypes().get(14);
          internal_static_BulkLoadHFileRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BulkLoadHFileRequest_descriptor,
              new java.lang.String[] { "Region", "FamilyPath", "AssignSeqNum", });
          internal_static_BulkLoadHFileRequest_FamilyPath_descriptor =
            internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0);
          internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BulkLoadHFileRequest_FamilyPath_descriptor,
              new java.lang.String[] { "Family", "Path", });
          internal_static_BulkLoadHFileResponse_descriptor =
            getDescriptor().getMessageTypes().get(15);
          internal_static_BulkLoadHFileResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_BulkLoadHFileResponse_descriptor,
              new java.lang.String[] { "Loaded", });
          internal_static_CoprocessorServiceCall_descriptor =
            getDescriptor().getMessageTypes().get(16);
          internal_static_CoprocessorServiceCall_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CoprocessorServiceCall_descriptor,
              new java.lang.String[] { "Row", "ServiceName", "MethodName", "Request", });
          internal_static_CoprocessorServiceResult_descriptor =
            getDescriptor().getMessageTypes().get(17);
          internal_static_CoprocessorServiceResult_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CoprocessorServiceResult_descriptor,
              new java.lang.String[] { "Value", });
          internal_static_CoprocessorServiceRequest_descriptor =
            getDescriptor().getMessageTypes().get(18);
          internal_static_CoprocessorServiceRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CoprocessorServiceRequest_descriptor,
              new java.lang.String[] { "Region", "Call", });
          internal_static_CoprocessorServiceResponse_descriptor =
            getDescriptor().getMessageTypes().get(19);
          internal_static_CoprocessorServiceResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_CoprocessorServiceResponse_descriptor,
              new java.lang.String[] { "Region", "Value", });
          internal_static_Action_descriptor =
            getDescriptor().getMessageTypes().get(20);
          internal_static_Action_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_Action_descriptor,
              new java.lang.String[] { "Index", "Mutation", "Get", "ServiceCall", });
          internal_static_RegionAction_descriptor =
            getDescriptor().getMessageTypes().get(21);
          internal_static_RegionAction_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RegionAction_descriptor,
              new java.lang.String[] { "Region", "Atomic", "Action", });
          internal_static_RegionLoadStats_descriptor =
            getDescriptor().getMessageTypes().get(22);
          internal_static_RegionLoadStats_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RegionLoadStats_descriptor,
              new java.lang.String[] { "MemstoreLoad", "HeapOccupancy", "CompactionPressure", });
          internal_static_ResultOrException_descriptor =
            getDescriptor().getMessageTypes().get(23);
          internal_static_ResultOrException_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ResultOrException_descriptor,
              new java.lang.String[] { "Index", "Result", "Exception", "ServiceResult", "LoadStats", });
          internal_static_RegionActionResult_descriptor =
            getDescriptor().getMessageTypes().get(24);
          internal_static_RegionActionResult_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RegionActionResult_descriptor,
              new java.lang.String[] { "ResultOrException", "Exception", });
          internal_static_MultiRequest_descriptor =
            getDescriptor().getMessageTypes().get(25);
          internal_static_MultiRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MultiRequest_descriptor,
              new java.lang.String[] { "RegionAction", "NonceGroup", "Condition", });
          internal_static_MultiResponse_descriptor =
            getDescriptor().getMessageTypes().get(26);
          internal_static_MultiResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MultiResponse_descriptor,
              new java.lang.String[] { "RegionActionResult", "Processed", });
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.FilterProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.CellProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.getDescriptor(),
        }, assigner);
  }
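
  /*
   * Illustrative sketch (not part of the generated output): the static block
   * above parses the serialized Client.proto descriptor embedded in
   * descriptorData and, via the assigner, wires each message Descriptor and
   * FieldAccessorTable to the static fields declared earlier. Once it has run,
   * the file descriptor can be inspected reflectively, for example to list the
   * ClientService methods whose indexes (0-6) the BlockingStub relies on:
   *
   *   com.google.protobuf.Descriptors.ServiceDescriptor svc =
   *       ClientProtos.getDescriptor().findServiceByName("ClientService");
   *   for (com.google.protobuf.Descriptors.MethodDescriptor m : svc.getMethods()) {
   *     System.out.println(m.getIndex() + " " + m.getName() + ": "
   *         + m.getInputType().getName() + " -> " + m.getOutputType().getName());
   *   }
   */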

  // @@protoc_insertion_point(outer_class_scope)
}