1 // Generated by the protocol buffer compiler. DO NOT EDIT! 2 // source: Client.proto 3 4 package org.apache.hadoop.hbase.protobuf.generated; 5 6 public final class ClientProtos { ClientProtos()7 private ClientProtos() {} registerAllExtensions( com.google.protobuf.ExtensionRegistry registry)8 public static void registerAllExtensions( 9 com.google.protobuf.ExtensionRegistry registry) { 10 } 11 /** 12 * Protobuf enum {@code Consistency} 13 * 14 * <pre> 15 ** 16 * Consistency defines the expected consistency level for an operation. 17 * </pre> 18 */ 19 public enum Consistency 20 implements com.google.protobuf.ProtocolMessageEnum { 21 /** 22 * <code>STRONG = 0;</code> 23 */ 24 STRONG(0, 0), 25 /** 26 * <code>TIMELINE = 1;</code> 27 */ 28 TIMELINE(1, 1), 29 ; 30 31 /** 32 * <code>STRONG = 0;</code> 33 */ 34 public static final int STRONG_VALUE = 0; 35 /** 36 * <code>TIMELINE = 1;</code> 37 */ 38 public static final int TIMELINE_VALUE = 1; 39 40 getNumber()41 public final int getNumber() { return value; } 42 valueOf(int value)43 public static Consistency valueOf(int value) { 44 switch (value) { 45 case 0: return STRONG; 46 case 1: return TIMELINE; 47 default: return null; 48 } 49 } 50 51 public static com.google.protobuf.Internal.EnumLiteMap<Consistency> internalGetValueMap()52 internalGetValueMap() { 53 return internalValueMap; 54 } 55 private static com.google.protobuf.Internal.EnumLiteMap<Consistency> 56 internalValueMap = 57 new com.google.protobuf.Internal.EnumLiteMap<Consistency>() { 58 public Consistency findValueByNumber(int number) { 59 return Consistency.valueOf(number); 60 } 61 }; 62 63 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()64 getValueDescriptor() { 65 return getDescriptor().getValues().get(index); 66 } 67 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()68 getDescriptorForType() { 69 return getDescriptor(); 70 } 71 public static final 
com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()72 getDescriptor() { 73 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor().getEnumTypes().get(0); 74 } 75 76 private static final Consistency[] VALUES = values(); 77 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)78 public static Consistency valueOf( 79 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 80 if (desc.getType() != getDescriptor()) { 81 throw new java.lang.IllegalArgumentException( 82 "EnumValueDescriptor is not for this type."); 83 } 84 return VALUES[desc.getIndex()]; 85 } 86 87 private final int index; 88 private final int value; 89 Consistency(int index, int value)90 private Consistency(int index, int value) { 91 this.index = index; 92 this.value = value; 93 } 94 95 // @@protoc_insertion_point(enum_scope:Consistency) 96 } 97 98 public interface AuthorizationsOrBuilder 99 extends com.google.protobuf.MessageOrBuilder { 100 101 // repeated string label = 1; 102 /** 103 * <code>repeated string label = 1;</code> 104 */ 105 java.util.List<java.lang.String> getLabelList()106 getLabelList(); 107 /** 108 * <code>repeated string label = 1;</code> 109 */ getLabelCount()110 int getLabelCount(); 111 /** 112 * <code>repeated string label = 1;</code> 113 */ getLabel(int index)114 java.lang.String getLabel(int index); 115 /** 116 * <code>repeated string label = 1;</code> 117 */ 118 com.google.protobuf.ByteString getLabelBytes(int index)119 getLabelBytes(int index); 120 } 121 /** 122 * Protobuf type {@code Authorizations} 123 * 124 * <pre> 125 ** 126 * The protocol buffer version of Authorizations. 127 * </pre> 128 */ 129 public static final class Authorizations extends 130 com.google.protobuf.GeneratedMessage 131 implements AuthorizationsOrBuilder { 132 // Use Authorizations.newBuilder() to construct. 
Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder)133 private Authorizations(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 134 super(builder); 135 this.unknownFields = builder.getUnknownFields(); 136 } Authorizations(boolean noInit)137 private Authorizations(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 138 139 private static final Authorizations defaultInstance; getDefaultInstance()140 public static Authorizations getDefaultInstance() { 141 return defaultInstance; 142 } 143 getDefaultInstanceForType()144 public Authorizations getDefaultInstanceForType() { 145 return defaultInstance; 146 } 147 148 private final com.google.protobuf.UnknownFieldSet unknownFields; 149 @java.lang.Override 150 public final com.google.protobuf.UnknownFieldSet getUnknownFields()151 getUnknownFields() { 152 return this.unknownFields; 153 } Authorizations( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)154 private Authorizations( 155 com.google.protobuf.CodedInputStream input, 156 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 157 throws com.google.protobuf.InvalidProtocolBufferException { 158 initFields(); 159 int mutable_bitField0_ = 0; 160 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 161 com.google.protobuf.UnknownFieldSet.newBuilder(); 162 try { 163 boolean done = false; 164 while (!done) { 165 int tag = input.readTag(); 166 switch (tag) { 167 case 0: 168 done = true; 169 break; 170 default: { 171 if (!parseUnknownField(input, unknownFields, 172 extensionRegistry, tag)) { 173 done = true; 174 } 175 break; 176 } 177 case 10: { 178 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 179 label_ = new com.google.protobuf.LazyStringArrayList(); 180 mutable_bitField0_ |= 0x00000001; 181 } 182 label_.add(input.readBytes()); 183 break; 184 } 185 } 186 } 187 } catch (com.google.protobuf.InvalidProtocolBufferException e) 
{ 188 throw e.setUnfinishedMessage(this); 189 } catch (java.io.IOException e) { 190 throw new com.google.protobuf.InvalidProtocolBufferException( 191 e.getMessage()).setUnfinishedMessage(this); 192 } finally { 193 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 194 label_ = new com.google.protobuf.UnmodifiableLazyStringList(label_); 195 } 196 this.unknownFields = unknownFields.build(); 197 makeExtensionsImmutable(); 198 } 199 } 200 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()201 getDescriptor() { 202 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor; 203 } 204 205 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()206 internalGetFieldAccessorTable() { 207 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable 208 .ensureFieldAccessorsInitialized( 209 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class); 210 } 211 212 public static com.google.protobuf.Parser<Authorizations> PARSER = 213 new com.google.protobuf.AbstractParser<Authorizations>() { 214 public Authorizations parsePartialFrom( 215 com.google.protobuf.CodedInputStream input, 216 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 217 throws com.google.protobuf.InvalidProtocolBufferException { 218 return new Authorizations(input, extensionRegistry); 219 } 220 }; 221 222 @java.lang.Override getParserForType()223 public com.google.protobuf.Parser<Authorizations> getParserForType() { 224 return PARSER; 225 } 226 227 // repeated string label = 1; 228 public static final int LABEL_FIELD_NUMBER = 1; 229 private com.google.protobuf.LazyStringList label_; 230 /** 231 * <code>repeated string label = 1;</code> 232 */ 233 public java.util.List<java.lang.String> getLabelList()234 getLabelList() { 235 
return label_; 236 } 237 /** 238 * <code>repeated string label = 1;</code> 239 */ getLabelCount()240 public int getLabelCount() { 241 return label_.size(); 242 } 243 /** 244 * <code>repeated string label = 1;</code> 245 */ getLabel(int index)246 public java.lang.String getLabel(int index) { 247 return label_.get(index); 248 } 249 /** 250 * <code>repeated string label = 1;</code> 251 */ 252 public com.google.protobuf.ByteString getLabelBytes(int index)253 getLabelBytes(int index) { 254 return label_.getByteString(index); 255 } 256 initFields()257 private void initFields() { 258 label_ = com.google.protobuf.LazyStringArrayList.EMPTY; 259 } 260 private byte memoizedIsInitialized = -1; isInitialized()261 public final boolean isInitialized() { 262 byte isInitialized = memoizedIsInitialized; 263 if (isInitialized != -1) return isInitialized == 1; 264 265 memoizedIsInitialized = 1; 266 return true; 267 } 268 writeTo(com.google.protobuf.CodedOutputStream output)269 public void writeTo(com.google.protobuf.CodedOutputStream output) 270 throws java.io.IOException { 271 getSerializedSize(); 272 for (int i = 0; i < label_.size(); i++) { 273 output.writeBytes(1, label_.getByteString(i)); 274 } 275 getUnknownFields().writeTo(output); 276 } 277 278 private int memoizedSerializedSize = -1; getSerializedSize()279 public int getSerializedSize() { 280 int size = memoizedSerializedSize; 281 if (size != -1) return size; 282 283 size = 0; 284 { 285 int dataSize = 0; 286 for (int i = 0; i < label_.size(); i++) { 287 dataSize += com.google.protobuf.CodedOutputStream 288 .computeBytesSizeNoTag(label_.getByteString(i)); 289 } 290 size += dataSize; 291 size += 1 * getLabelList().size(); 292 } 293 size += getUnknownFields().getSerializedSize(); 294 memoizedSerializedSize = size; 295 return size; 296 } 297 298 private static final long serialVersionUID = 0L; 299 @java.lang.Override writeReplace()300 protected java.lang.Object writeReplace() 301 throws java.io.ObjectStreamException { 302 return 
super.writeReplace(); 303 } 304 305 @java.lang.Override equals(final java.lang.Object obj)306 public boolean equals(final java.lang.Object obj) { 307 if (obj == this) { 308 return true; 309 } 310 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)) { 311 return super.equals(obj); 312 } 313 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) obj; 314 315 boolean result = true; 316 result = result && getLabelList() 317 .equals(other.getLabelList()); 318 result = result && 319 getUnknownFields().equals(other.getUnknownFields()); 320 return result; 321 } 322 323 private int memoizedHashCode = 0; 324 @java.lang.Override hashCode()325 public int hashCode() { 326 if (memoizedHashCode != 0) { 327 return memoizedHashCode; 328 } 329 int hash = 41; 330 hash = (19 * hash) + getDescriptorForType().hashCode(); 331 if (getLabelCount() > 0) { 332 hash = (37 * hash) + LABEL_FIELD_NUMBER; 333 hash = (53 * hash) + getLabelList().hashCode(); 334 } 335 hash = (29 * hash) + getUnknownFields().hashCode(); 336 memoizedHashCode = hash; 337 return hash; 338 } 339 parseFrom( com.google.protobuf.ByteString data)340 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( 341 com.google.protobuf.ByteString data) 342 throws com.google.protobuf.InvalidProtocolBufferException { 343 return PARSER.parseFrom(data); 344 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)345 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( 346 com.google.protobuf.ByteString data, 347 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 348 throws com.google.protobuf.InvalidProtocolBufferException { 349 return PARSER.parseFrom(data, extensionRegistry); 350 } parseFrom(byte[] data)351 public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(byte[] data) 352 throws com.google.protobuf.InvalidProtocolBufferException { 353 return PARSER.parseFrom(data); 354 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)355 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( 356 byte[] data, 357 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 358 throws com.google.protobuf.InvalidProtocolBufferException { 359 return PARSER.parseFrom(data, extensionRegistry); 360 } parseFrom(java.io.InputStream input)361 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom(java.io.InputStream input) 362 throws java.io.IOException { 363 return PARSER.parseFrom(input); 364 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)365 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( 366 java.io.InputStream input, 367 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 368 throws java.io.IOException { 369 return PARSER.parseFrom(input, extensionRegistry); 370 } parseDelimitedFrom(java.io.InputStream input)371 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom(java.io.InputStream input) 372 throws java.io.IOException { 373 return PARSER.parseDelimitedFrom(input); 374 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)375 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseDelimitedFrom( 376 java.io.InputStream input, 377 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 378 throws java.io.IOException { 379 return PARSER.parseDelimitedFrom(input, extensionRegistry); 380 } parseFrom( com.google.protobuf.CodedInputStream input)381 public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( 382 com.google.protobuf.CodedInputStream input) 383 throws java.io.IOException { 384 return PARSER.parseFrom(input); 385 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)386 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parseFrom( 387 com.google.protobuf.CodedInputStream input, 388 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 389 throws java.io.IOException { 390 return PARSER.parseFrom(input, extensionRegistry); 391 } 392 newBuilder()393 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()394 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype)395 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations prototype) { 396 return newBuilder().mergeFrom(prototype); 397 } toBuilder()398 public Builder toBuilder() { return newBuilder(this); } 399 400 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)401 protected Builder newBuilderForType( 402 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 403 Builder builder = new Builder(parent); 404 return builder; 405 } 406 /** 407 * Protobuf type {@code Authorizations} 408 * 409 * <pre> 410 ** 411 * The protocol buffer version of Authorizations. 
412 * </pre> 413 */ 414 public static final class Builder extends 415 com.google.protobuf.GeneratedMessage.Builder<Builder> 416 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.AuthorizationsOrBuilder { 417 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()418 getDescriptor() { 419 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor; 420 } 421 422 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()423 internalGetFieldAccessorTable() { 424 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_fieldAccessorTable 425 .ensureFieldAccessorsInitialized( 426 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.Builder.class); 427 } 428 429 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.newBuilder() Builder()430 private Builder() { 431 maybeForceBuilderInitialization(); 432 } 433 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)434 private Builder( 435 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 436 super(parent); 437 maybeForceBuilderInitialization(); 438 } maybeForceBuilderInitialization()439 private void maybeForceBuilderInitialization() { 440 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 441 } 442 } create()443 private static Builder create() { 444 return new Builder(); 445 } 446 clear()447 public Builder clear() { 448 super.clear(); 449 label_ = com.google.protobuf.LazyStringArrayList.EMPTY; 450 bitField0_ = (bitField0_ & ~0x00000001); 451 return this; 452 } 453 clone()454 public Builder clone() { 455 return create().mergeFrom(buildPartial()); 456 } 457 458 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()459 getDescriptorForType() { 460 return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Authorizations_descriptor; 461 } 462 getDefaultInstanceForType()463 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations getDefaultInstanceForType() { 464 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance(); 465 } 466 build()467 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations build() { 468 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = buildPartial(); 469 if (!result.isInitialized()) { 470 throw newUninitializedMessageException(result); 471 } 472 return result; 473 } 474 buildPartial()475 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations buildPartial() { 476 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations(this); 477 int from_bitField0_ = bitField0_; 478 if (((bitField0_ & 0x00000001) == 0x00000001)) { 479 label_ = new com.google.protobuf.UnmodifiableLazyStringList( 480 label_); 481 bitField0_ = (bitField0_ & ~0x00000001); 482 } 483 result.label_ = label_; 484 onBuilt(); 485 return result; 486 } 487 mergeFrom(com.google.protobuf.Message other)488 public Builder mergeFrom(com.google.protobuf.Message other) { 489 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) { 490 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations)other); 491 } else { 492 super.mergeFrom(other); 493 return this; 494 } 495 } 496 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other)497 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations other) { 498 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations.getDefaultInstance()) return this; 499 if (!other.label_.isEmpty()) { 500 if 
(label_.isEmpty()) { 501 label_ = other.label_; 502 bitField0_ = (bitField0_ & ~0x00000001); 503 } else { 504 ensureLabelIsMutable(); 505 label_.addAll(other.label_); 506 } 507 onChanged(); 508 } 509 this.mergeUnknownFields(other.getUnknownFields()); 510 return this; 511 } 512 isInitialized()513 public final boolean isInitialized() { 514 return true; 515 } 516 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)517 public Builder mergeFrom( 518 com.google.protobuf.CodedInputStream input, 519 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 520 throws java.io.IOException { 521 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations parsedMessage = null; 522 try { 523 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 524 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 525 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Authorizations) e.getUnfinishedMessage(); 526 throw e; 527 } finally { 528 if (parsedMessage != null) { 529 mergeFrom(parsedMessage); 530 } 531 } 532 return this; 533 } 534 private int bitField0_; 535 536 // repeated string label = 1; 537 private com.google.protobuf.LazyStringList label_ = com.google.protobuf.LazyStringArrayList.EMPTY; ensureLabelIsMutable()538 private void ensureLabelIsMutable() { 539 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 540 label_ = new com.google.protobuf.LazyStringArrayList(label_); 541 bitField0_ |= 0x00000001; 542 } 543 } 544 /** 545 * <code>repeated string label = 1;</code> 546 */ 547 public java.util.List<java.lang.String> getLabelList()548 getLabelList() { 549 return java.util.Collections.unmodifiableList(label_); 550 } 551 /** 552 * <code>repeated string label = 1;</code> 553 */ getLabelCount()554 public int getLabelCount() { 555 return label_.size(); 556 } 557 /** 558 * <code>repeated string label = 1;</code> 559 */ getLabel(int index)560 public java.lang.String 
getLabel(int index) { 561 return label_.get(index); 562 } 563 /** 564 * <code>repeated string label = 1;</code> 565 */ 566 public com.google.protobuf.ByteString getLabelBytes(int index)567 getLabelBytes(int index) { 568 return label_.getByteString(index); 569 } 570 /** 571 * <code>repeated string label = 1;</code> 572 */ setLabel( int index, java.lang.String value)573 public Builder setLabel( 574 int index, java.lang.String value) { 575 if (value == null) { 576 throw new NullPointerException(); 577 } 578 ensureLabelIsMutable(); 579 label_.set(index, value); 580 onChanged(); 581 return this; 582 } 583 /** 584 * <code>repeated string label = 1;</code> 585 */ addLabel( java.lang.String value)586 public Builder addLabel( 587 java.lang.String value) { 588 if (value == null) { 589 throw new NullPointerException(); 590 } 591 ensureLabelIsMutable(); 592 label_.add(value); 593 onChanged(); 594 return this; 595 } 596 /** 597 * <code>repeated string label = 1;</code> 598 */ addAllLabel( java.lang.Iterable<java.lang.String> values)599 public Builder addAllLabel( 600 java.lang.Iterable<java.lang.String> values) { 601 ensureLabelIsMutable(); 602 super.addAll(values, label_); 603 onChanged(); 604 return this; 605 } 606 /** 607 * <code>repeated string label = 1;</code> 608 */ clearLabel()609 public Builder clearLabel() { 610 label_ = com.google.protobuf.LazyStringArrayList.EMPTY; 611 bitField0_ = (bitField0_ & ~0x00000001); 612 onChanged(); 613 return this; 614 } 615 /** 616 * <code>repeated string label = 1;</code> 617 */ addLabelBytes( com.google.protobuf.ByteString value)618 public Builder addLabelBytes( 619 com.google.protobuf.ByteString value) { 620 if (value == null) { 621 throw new NullPointerException(); 622 } 623 ensureLabelIsMutable(); 624 label_.add(value); 625 onChanged(); 626 return this; 627 } 628 629 // @@protoc_insertion_point(builder_scope:Authorizations) 630 } 631 632 static { 633 defaultInstance = new Authorizations(true); defaultInstance.initFields()634 
defaultInstance.initFields(); 635 } 636 637 // @@protoc_insertion_point(class_scope:Authorizations) 638 } 639 640 public interface CellVisibilityOrBuilder 641 extends com.google.protobuf.MessageOrBuilder { 642 643 // required string expression = 1; 644 /** 645 * <code>required string expression = 1;</code> 646 */ hasExpression()647 boolean hasExpression(); 648 /** 649 * <code>required string expression = 1;</code> 650 */ getExpression()651 java.lang.String getExpression(); 652 /** 653 * <code>required string expression = 1;</code> 654 */ 655 com.google.protobuf.ByteString getExpressionBytes()656 getExpressionBytes(); 657 } 658 /** 659 * Protobuf type {@code CellVisibility} 660 * 661 * <pre> 662 ** 663 * The protocol buffer version of CellVisibility. 664 * </pre> 665 */ 666 public static final class CellVisibility extends 667 com.google.protobuf.GeneratedMessage 668 implements CellVisibilityOrBuilder { 669 // Use CellVisibility.newBuilder() to construct. CellVisibility(com.google.protobuf.GeneratedMessage.Builder<?> builder)670 private CellVisibility(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 671 super(builder); 672 this.unknownFields = builder.getUnknownFields(); 673 } CellVisibility(boolean noInit)674 private CellVisibility(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 675 676 private static final CellVisibility defaultInstance; getDefaultInstance()677 public static CellVisibility getDefaultInstance() { 678 return defaultInstance; 679 } 680 getDefaultInstanceForType()681 public CellVisibility getDefaultInstanceForType() { 682 return defaultInstance; 683 } 684 685 private final com.google.protobuf.UnknownFieldSet unknownFields; 686 @java.lang.Override 687 public final com.google.protobuf.UnknownFieldSet getUnknownFields()688 getUnknownFields() { 689 return this.unknownFields; 690 } CellVisibility( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)691 private CellVisibility( 692 com.google.protobuf.CodedInputStream input, 693 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 694 throws com.google.protobuf.InvalidProtocolBufferException { 695 initFields(); 696 int mutable_bitField0_ = 0; 697 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 698 com.google.protobuf.UnknownFieldSet.newBuilder(); 699 try { 700 boolean done = false; 701 while (!done) { 702 int tag = input.readTag(); 703 switch (tag) { 704 case 0: 705 done = true; 706 break; 707 default: { 708 if (!parseUnknownField(input, unknownFields, 709 extensionRegistry, tag)) { 710 done = true; 711 } 712 break; 713 } 714 case 10: { 715 bitField0_ |= 0x00000001; 716 expression_ = input.readBytes(); 717 break; 718 } 719 } 720 } 721 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 722 throw e.setUnfinishedMessage(this); 723 } catch (java.io.IOException e) { 724 throw new com.google.protobuf.InvalidProtocolBufferException( 725 e.getMessage()).setUnfinishedMessage(this); 726 } finally { 727 this.unknownFields = unknownFields.build(); 728 makeExtensionsImmutable(); 729 } 730 } 731 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()732 getDescriptor() { 733 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor; 734 } 735 736 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()737 internalGetFieldAccessorTable() { 738 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable 739 .ensureFieldAccessorsInitialized( 740 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class); 741 } 742 743 public static com.google.protobuf.Parser<CellVisibility> PARSER = 744 new com.google.protobuf.AbstractParser<CellVisibility>() { 745 public 
CellVisibility parsePartialFrom( 746 com.google.protobuf.CodedInputStream input, 747 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 748 throws com.google.protobuf.InvalidProtocolBufferException { 749 return new CellVisibility(input, extensionRegistry); 750 } 751 }; 752 753 @java.lang.Override getParserForType()754 public com.google.protobuf.Parser<CellVisibility> getParserForType() { 755 return PARSER; 756 } 757 758 private int bitField0_; 759 // required string expression = 1; 760 public static final int EXPRESSION_FIELD_NUMBER = 1; 761 private java.lang.Object expression_; 762 /** 763 * <code>required string expression = 1;</code> 764 */ hasExpression()765 public boolean hasExpression() { 766 return ((bitField0_ & 0x00000001) == 0x00000001); 767 } 768 /** 769 * <code>required string expression = 1;</code> 770 */ getExpression()771 public java.lang.String getExpression() { 772 java.lang.Object ref = expression_; 773 if (ref instanceof java.lang.String) { 774 return (java.lang.String) ref; 775 } else { 776 com.google.protobuf.ByteString bs = 777 (com.google.protobuf.ByteString) ref; 778 java.lang.String s = bs.toStringUtf8(); 779 if (bs.isValidUtf8()) { 780 expression_ = s; 781 } 782 return s; 783 } 784 } 785 /** 786 * <code>required string expression = 1;</code> 787 */ 788 public com.google.protobuf.ByteString getExpressionBytes()789 getExpressionBytes() { 790 java.lang.Object ref = expression_; 791 if (ref instanceof java.lang.String) { 792 com.google.protobuf.ByteString b = 793 com.google.protobuf.ByteString.copyFromUtf8( 794 (java.lang.String) ref); 795 expression_ = b; 796 return b; 797 } else { 798 return (com.google.protobuf.ByteString) ref; 799 } 800 } 801 initFields()802 private void initFields() { 803 expression_ = ""; 804 } 805 private byte memoizedIsInitialized = -1; isInitialized()806 public final boolean isInitialized() { 807 byte isInitialized = memoizedIsInitialized; 808 if (isInitialized != -1) return isInitialized == 1; 809 810 if 
(!hasExpression()) { 811 memoizedIsInitialized = 0; 812 return false; 813 } 814 memoizedIsInitialized = 1; 815 return true; 816 } 817 writeTo(com.google.protobuf.CodedOutputStream output)818 public void writeTo(com.google.protobuf.CodedOutputStream output) 819 throws java.io.IOException { 820 getSerializedSize(); 821 if (((bitField0_ & 0x00000001) == 0x00000001)) { 822 output.writeBytes(1, getExpressionBytes()); 823 } 824 getUnknownFields().writeTo(output); 825 } 826 827 private int memoizedSerializedSize = -1; getSerializedSize()828 public int getSerializedSize() { 829 int size = memoizedSerializedSize; 830 if (size != -1) return size; 831 832 size = 0; 833 if (((bitField0_ & 0x00000001) == 0x00000001)) { 834 size += com.google.protobuf.CodedOutputStream 835 .computeBytesSize(1, getExpressionBytes()); 836 } 837 size += getUnknownFields().getSerializedSize(); 838 memoizedSerializedSize = size; 839 return size; 840 } 841 842 private static final long serialVersionUID = 0L; 843 @java.lang.Override writeReplace()844 protected java.lang.Object writeReplace() 845 throws java.io.ObjectStreamException { 846 return super.writeReplace(); 847 } 848 849 @java.lang.Override equals(final java.lang.Object obj)850 public boolean equals(final java.lang.Object obj) { 851 if (obj == this) { 852 return true; 853 } 854 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)) { 855 return super.equals(obj); 856 } 857 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) obj; 858 859 boolean result = true; 860 result = result && (hasExpression() == other.hasExpression()); 861 if (hasExpression()) { 862 result = result && getExpression() 863 .equals(other.getExpression()); 864 } 865 result = result && 866 getUnknownFields().equals(other.getUnknownFields()); 867 return result; 868 } 869 870 private int memoizedHashCode = 0; 871 @java.lang.Override 
hashCode()872 public int hashCode() { 873 if (memoizedHashCode != 0) { 874 return memoizedHashCode; 875 } 876 int hash = 41; 877 hash = (19 * hash) + getDescriptorForType().hashCode(); 878 if (hasExpression()) { 879 hash = (37 * hash) + EXPRESSION_FIELD_NUMBER; 880 hash = (53 * hash) + getExpression().hashCode(); 881 } 882 hash = (29 * hash) + getUnknownFields().hashCode(); 883 memoizedHashCode = hash; 884 return hash; 885 } 886 parseFrom( com.google.protobuf.ByteString data)887 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( 888 com.google.protobuf.ByteString data) 889 throws com.google.protobuf.InvalidProtocolBufferException { 890 return PARSER.parseFrom(data); 891 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)892 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( 893 com.google.protobuf.ByteString data, 894 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 895 throws com.google.protobuf.InvalidProtocolBufferException { 896 return PARSER.parseFrom(data, extensionRegistry); 897 } parseFrom(byte[] data)898 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(byte[] data) 899 throws com.google.protobuf.InvalidProtocolBufferException { 900 return PARSER.parseFrom(data); 901 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)902 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( 903 byte[] data, 904 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 905 throws com.google.protobuf.InvalidProtocolBufferException { 906 return PARSER.parseFrom(data, extensionRegistry); 907 } parseFrom(java.io.InputStream input)908 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom(java.io.InputStream input) 909 throws java.io.IOException { 910 return 
PARSER.parseFrom(input); 911 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)912 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( 913 java.io.InputStream input, 914 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 915 throws java.io.IOException { 916 return PARSER.parseFrom(input, extensionRegistry); 917 } parseDelimitedFrom(java.io.InputStream input)918 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom(java.io.InputStream input) 919 throws java.io.IOException { 920 return PARSER.parseDelimitedFrom(input); 921 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)922 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseDelimitedFrom( 923 java.io.InputStream input, 924 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 925 throws java.io.IOException { 926 return PARSER.parseDelimitedFrom(input, extensionRegistry); 927 } parseFrom( com.google.protobuf.CodedInputStream input)928 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( 929 com.google.protobuf.CodedInputStream input) 930 throws java.io.IOException { 931 return PARSER.parseFrom(input); 932 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)933 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parseFrom( 934 com.google.protobuf.CodedInputStream input, 935 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 936 throws java.io.IOException { 937 return PARSER.parseFrom(input, extensionRegistry); 938 } 939 newBuilder()940 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()941 public Builder newBuilderForType() { return newBuilder(); } 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype)942 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility prototype) { 943 return newBuilder().mergeFrom(prototype); 944 } toBuilder()945 public Builder toBuilder() { return newBuilder(this); } 946 947 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)948 protected Builder newBuilderForType( 949 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 950 Builder builder = new Builder(parent); 951 return builder; 952 } 953 /** 954 * Protobuf type {@code CellVisibility} 955 * 956 * <pre> 957 ** 958 * The protocol buffer version of CellVisibility. 959 * </pre> 960 */ 961 public static final class Builder extends 962 com.google.protobuf.GeneratedMessage.Builder<Builder> 963 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibilityOrBuilder { 964 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()965 getDescriptor() { 966 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor; 967 } 968 969 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()970 internalGetFieldAccessorTable() { 971 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_fieldAccessorTable 972 .ensureFieldAccessorsInitialized( 973 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.Builder.class); 974 } 975 976 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.newBuilder() Builder()977 private Builder() { 978 maybeForceBuilderInitialization(); 979 } 980 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)981 private Builder( 982 com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { 983 super(parent); 984 maybeForceBuilderInitialization(); 985 } maybeForceBuilderInitialization()986 private void maybeForceBuilderInitialization() { 987 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 988 } 989 } create()990 private static Builder create() { 991 return new Builder(); 992 } 993 clear()994 public Builder clear() { 995 super.clear(); 996 expression_ = ""; 997 bitField0_ = (bitField0_ & ~0x00000001); 998 return this; 999 } 1000 clone()1001 public Builder clone() { 1002 return create().mergeFrom(buildPartial()); 1003 } 1004 1005 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()1006 getDescriptorForType() { 1007 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_CellVisibility_descriptor; 1008 } 1009 getDefaultInstanceForType()1010 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility getDefaultInstanceForType() { 1011 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance(); 1012 } 1013 build()1014 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility build() { 1015 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = buildPartial(); 1016 if (!result.isInitialized()) { 1017 throw newUninitializedMessageException(result); 1018 } 1019 return result; 1020 } 1021 buildPartial()1022 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility buildPartial() { 1023 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility(this); 1024 int from_bitField0_ = bitField0_; 1025 int to_bitField0_ = 0; 1026 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 1027 to_bitField0_ |= 0x00000001; 1028 } 1029 result.expression_ = expression_; 1030 result.bitField0_ = to_bitField0_; 1031 onBuilt(); 1032 return result; 1033 } 1034 
mergeFrom(com.google.protobuf.Message other)1035 public Builder mergeFrom(com.google.protobuf.Message other) { 1036 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) { 1037 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility)other); 1038 } else { 1039 super.mergeFrom(other); 1040 return this; 1041 } 1042 } 1043 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other)1044 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility other) { 1045 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility.getDefaultInstance()) return this; 1046 if (other.hasExpression()) { 1047 bitField0_ |= 0x00000001; 1048 expression_ = other.expression_; 1049 onChanged(); 1050 } 1051 this.mergeUnknownFields(other.getUnknownFields()); 1052 return this; 1053 } 1054 isInitialized()1055 public final boolean isInitialized() { 1056 if (!hasExpression()) { 1057 1058 return false; 1059 } 1060 return true; 1061 } 1062 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1063 public Builder mergeFrom( 1064 com.google.protobuf.CodedInputStream input, 1065 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1066 throws java.io.IOException { 1067 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility parsedMessage = null; 1068 try { 1069 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 1070 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1071 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CellVisibility) e.getUnfinishedMessage(); 1072 throw e; 1073 } finally { 1074 if (parsedMessage != null) { 1075 mergeFrom(parsedMessage); 1076 } 1077 } 1078 return this; 1079 } 1080 private int bitField0_; 1081 1082 // required string expression = 1; 1083 private java.lang.Object expression_ = ""; 
1084 /** 1085 * <code>required string expression = 1;</code> 1086 */ hasExpression()1087 public boolean hasExpression() { 1088 return ((bitField0_ & 0x00000001) == 0x00000001); 1089 } 1090 /** 1091 * <code>required string expression = 1;</code> 1092 */ getExpression()1093 public java.lang.String getExpression() { 1094 java.lang.Object ref = expression_; 1095 if (!(ref instanceof java.lang.String)) { 1096 java.lang.String s = ((com.google.protobuf.ByteString) ref) 1097 .toStringUtf8(); 1098 expression_ = s; 1099 return s; 1100 } else { 1101 return (java.lang.String) ref; 1102 } 1103 } 1104 /** 1105 * <code>required string expression = 1;</code> 1106 */ 1107 public com.google.protobuf.ByteString getExpressionBytes()1108 getExpressionBytes() { 1109 java.lang.Object ref = expression_; 1110 if (ref instanceof String) { 1111 com.google.protobuf.ByteString b = 1112 com.google.protobuf.ByteString.copyFromUtf8( 1113 (java.lang.String) ref); 1114 expression_ = b; 1115 return b; 1116 } else { 1117 return (com.google.protobuf.ByteString) ref; 1118 } 1119 } 1120 /** 1121 * <code>required string expression = 1;</code> 1122 */ setExpression( java.lang.String value)1123 public Builder setExpression( 1124 java.lang.String value) { 1125 if (value == null) { 1126 throw new NullPointerException(); 1127 } 1128 bitField0_ |= 0x00000001; 1129 expression_ = value; 1130 onChanged(); 1131 return this; 1132 } 1133 /** 1134 * <code>required string expression = 1;</code> 1135 */ clearExpression()1136 public Builder clearExpression() { 1137 bitField0_ = (bitField0_ & ~0x00000001); 1138 expression_ = getDefaultInstance().getExpression(); 1139 onChanged(); 1140 return this; 1141 } 1142 /** 1143 * <code>required string expression = 1;</code> 1144 */ setExpressionBytes( com.google.protobuf.ByteString value)1145 public Builder setExpressionBytes( 1146 com.google.protobuf.ByteString value) { 1147 if (value == null) { 1148 throw new NullPointerException(); 1149 } 1150 bitField0_ |= 0x00000001; 1151 
expression_ = value; 1152 onChanged(); 1153 return this; 1154 } 1155 1156 // @@protoc_insertion_point(builder_scope:CellVisibility) 1157 } 1158 1159 static { 1160 defaultInstance = new CellVisibility(true); defaultInstance.initFields()1161 defaultInstance.initFields(); 1162 } 1163 1164 // @@protoc_insertion_point(class_scope:CellVisibility) 1165 } 1166 1167 public interface ColumnOrBuilder 1168 extends com.google.protobuf.MessageOrBuilder { 1169 1170 // required bytes family = 1; 1171 /** 1172 * <code>required bytes family = 1;</code> 1173 */ hasFamily()1174 boolean hasFamily(); 1175 /** 1176 * <code>required bytes family = 1;</code> 1177 */ getFamily()1178 com.google.protobuf.ByteString getFamily(); 1179 1180 // repeated bytes qualifier = 2; 1181 /** 1182 * <code>repeated bytes qualifier = 2;</code> 1183 */ getQualifierList()1184 java.util.List<com.google.protobuf.ByteString> getQualifierList(); 1185 /** 1186 * <code>repeated bytes qualifier = 2;</code> 1187 */ getQualifierCount()1188 int getQualifierCount(); 1189 /** 1190 * <code>repeated bytes qualifier = 2;</code> 1191 */ getQualifier(int index)1192 com.google.protobuf.ByteString getQualifier(int index); 1193 } 1194 /** 1195 * Protobuf type {@code Column} 1196 * 1197 * <pre> 1198 ** 1199 * Container for a list of column qualifier names of a family. 1200 * </pre> 1201 */ 1202 public static final class Column extends 1203 com.google.protobuf.GeneratedMessage 1204 implements ColumnOrBuilder { 1205 // Use Column.newBuilder() to construct. 
Column(com.google.protobuf.GeneratedMessage.Builder<?> builder)1206 private Column(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 1207 super(builder); 1208 this.unknownFields = builder.getUnknownFields(); 1209 } Column(boolean noInit)1210 private Column(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 1211 1212 private static final Column defaultInstance; getDefaultInstance()1213 public static Column getDefaultInstance() { 1214 return defaultInstance; 1215 } 1216 getDefaultInstanceForType()1217 public Column getDefaultInstanceForType() { 1218 return defaultInstance; 1219 } 1220 1221 private final com.google.protobuf.UnknownFieldSet unknownFields; 1222 @java.lang.Override 1223 public final com.google.protobuf.UnknownFieldSet getUnknownFields()1224 getUnknownFields() { 1225 return this.unknownFields; 1226 } Column( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1227 private Column( 1228 com.google.protobuf.CodedInputStream input, 1229 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1230 throws com.google.protobuf.InvalidProtocolBufferException { 1231 initFields(); 1232 int mutable_bitField0_ = 0; 1233 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 1234 com.google.protobuf.UnknownFieldSet.newBuilder(); 1235 try { 1236 boolean done = false; 1237 while (!done) { 1238 int tag = input.readTag(); 1239 switch (tag) { 1240 case 0: 1241 done = true; 1242 break; 1243 default: { 1244 if (!parseUnknownField(input, unknownFields, 1245 extensionRegistry, tag)) { 1246 done = true; 1247 } 1248 break; 1249 } 1250 case 10: { 1251 bitField0_ |= 0x00000001; 1252 family_ = input.readBytes(); 1253 break; 1254 } 1255 case 18: { 1256 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 1257 qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>(); 1258 mutable_bitField0_ |= 0x00000002; 1259 } 1260 
qualifier_.add(input.readBytes()); 1261 break; 1262 } 1263 } 1264 } 1265 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1266 throw e.setUnfinishedMessage(this); 1267 } catch (java.io.IOException e) { 1268 throw new com.google.protobuf.InvalidProtocolBufferException( 1269 e.getMessage()).setUnfinishedMessage(this); 1270 } finally { 1271 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 1272 qualifier_ = java.util.Collections.unmodifiableList(qualifier_); 1273 } 1274 this.unknownFields = unknownFields.build(); 1275 makeExtensionsImmutable(); 1276 } 1277 } 1278 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()1279 getDescriptor() { 1280 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; 1281 } 1282 1283 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()1284 internalGetFieldAccessorTable() { 1285 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable 1286 .ensureFieldAccessorsInitialized( 1287 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); 1288 } 1289 1290 public static com.google.protobuf.Parser<Column> PARSER = 1291 new com.google.protobuf.AbstractParser<Column>() { 1292 public Column parsePartialFrom( 1293 com.google.protobuf.CodedInputStream input, 1294 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1295 throws com.google.protobuf.InvalidProtocolBufferException { 1296 return new Column(input, extensionRegistry); 1297 } 1298 }; 1299 1300 @java.lang.Override getParserForType()1301 public com.google.protobuf.Parser<Column> getParserForType() { 1302 return PARSER; 1303 } 1304 1305 private int bitField0_; 1306 // required bytes family = 1; 1307 public static final int FAMILY_FIELD_NUMBER = 1; 1308 private com.google.protobuf.ByteString family_; 1309 /** 1310 * 
<code>required bytes family = 1;</code> 1311 */ hasFamily()1312 public boolean hasFamily() { 1313 return ((bitField0_ & 0x00000001) == 0x00000001); 1314 } 1315 /** 1316 * <code>required bytes family = 1;</code> 1317 */ getFamily()1318 public com.google.protobuf.ByteString getFamily() { 1319 return family_; 1320 } 1321 1322 // repeated bytes qualifier = 2; 1323 public static final int QUALIFIER_FIELD_NUMBER = 2; 1324 private java.util.List<com.google.protobuf.ByteString> qualifier_; 1325 /** 1326 * <code>repeated bytes qualifier = 2;</code> 1327 */ 1328 public java.util.List<com.google.protobuf.ByteString> getQualifierList()1329 getQualifierList() { 1330 return qualifier_; 1331 } 1332 /** 1333 * <code>repeated bytes qualifier = 2;</code> 1334 */ getQualifierCount()1335 public int getQualifierCount() { 1336 return qualifier_.size(); 1337 } 1338 /** 1339 * <code>repeated bytes qualifier = 2;</code> 1340 */ getQualifier(int index)1341 public com.google.protobuf.ByteString getQualifier(int index) { 1342 return qualifier_.get(index); 1343 } 1344 initFields()1345 private void initFields() { 1346 family_ = com.google.protobuf.ByteString.EMPTY; 1347 qualifier_ = java.util.Collections.emptyList(); 1348 } 1349 private byte memoizedIsInitialized = -1; isInitialized()1350 public final boolean isInitialized() { 1351 byte isInitialized = memoizedIsInitialized; 1352 if (isInitialized != -1) return isInitialized == 1; 1353 1354 if (!hasFamily()) { 1355 memoizedIsInitialized = 0; 1356 return false; 1357 } 1358 memoizedIsInitialized = 1; 1359 return true; 1360 } 1361 writeTo(com.google.protobuf.CodedOutputStream output)1362 public void writeTo(com.google.protobuf.CodedOutputStream output) 1363 throws java.io.IOException { 1364 getSerializedSize(); 1365 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1366 output.writeBytes(1, family_); 1367 } 1368 for (int i = 0; i < qualifier_.size(); i++) { 1369 output.writeBytes(2, qualifier_.get(i)); 1370 } 1371 
getUnknownFields().writeTo(output); 1372 } 1373 1374 private int memoizedSerializedSize = -1; getSerializedSize()1375 public int getSerializedSize() { 1376 int size = memoizedSerializedSize; 1377 if (size != -1) return size; 1378 1379 size = 0; 1380 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1381 size += com.google.protobuf.CodedOutputStream 1382 .computeBytesSize(1, family_); 1383 } 1384 { 1385 int dataSize = 0; 1386 for (int i = 0; i < qualifier_.size(); i++) { 1387 dataSize += com.google.protobuf.CodedOutputStream 1388 .computeBytesSizeNoTag(qualifier_.get(i)); 1389 } 1390 size += dataSize; 1391 size += 1 * getQualifierList().size(); 1392 } 1393 size += getUnknownFields().getSerializedSize(); 1394 memoizedSerializedSize = size; 1395 return size; 1396 } 1397 1398 private static final long serialVersionUID = 0L; 1399 @java.lang.Override writeReplace()1400 protected java.lang.Object writeReplace() 1401 throws java.io.ObjectStreamException { 1402 return super.writeReplace(); 1403 } 1404 1405 @java.lang.Override equals(final java.lang.Object obj)1406 public boolean equals(final java.lang.Object obj) { 1407 if (obj == this) { 1408 return true; 1409 } 1410 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)) { 1411 return super.equals(obj); 1412 } 1413 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) obj; 1414 1415 boolean result = true; 1416 result = result && (hasFamily() == other.hasFamily()); 1417 if (hasFamily()) { 1418 result = result && getFamily() 1419 .equals(other.getFamily()); 1420 } 1421 result = result && getQualifierList() 1422 .equals(other.getQualifierList()); 1423 result = result && 1424 getUnknownFields().equals(other.getUnknownFields()); 1425 return result; 1426 } 1427 1428 private int memoizedHashCode = 0; 1429 @java.lang.Override hashCode()1430 public int hashCode() { 1431 if (memoizedHashCode != 0) { 1432 return 
memoizedHashCode; 1433 } 1434 int hash = 41; 1435 hash = (19 * hash) + getDescriptorForType().hashCode(); 1436 if (hasFamily()) { 1437 hash = (37 * hash) + FAMILY_FIELD_NUMBER; 1438 hash = (53 * hash) + getFamily().hashCode(); 1439 } 1440 if (getQualifierCount() > 0) { 1441 hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; 1442 hash = (53 * hash) + getQualifierList().hashCode(); 1443 } 1444 hash = (29 * hash) + getUnknownFields().hashCode(); 1445 memoizedHashCode = hash; 1446 return hash; 1447 } 1448 parseFrom( com.google.protobuf.ByteString data)1449 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( 1450 com.google.protobuf.ByteString data) 1451 throws com.google.protobuf.InvalidProtocolBufferException { 1452 return PARSER.parseFrom(data); 1453 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1454 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( 1455 com.google.protobuf.ByteString data, 1456 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1457 throws com.google.protobuf.InvalidProtocolBufferException { 1458 return PARSER.parseFrom(data, extensionRegistry); 1459 } parseFrom(byte[] data)1460 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(byte[] data) 1461 throws com.google.protobuf.InvalidProtocolBufferException { 1462 return PARSER.parseFrom(data); 1463 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1464 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( 1465 byte[] data, 1466 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1467 throws com.google.protobuf.InvalidProtocolBufferException { 1468 return PARSER.parseFrom(data, extensionRegistry); 1469 } parseFrom(java.io.InputStream input)1470 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom(java.io.InputStream 
input) 1471 throws java.io.IOException { 1472 return PARSER.parseFrom(input); 1473 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1474 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( 1475 java.io.InputStream input, 1476 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1477 throws java.io.IOException { 1478 return PARSER.parseFrom(input, extensionRegistry); 1479 } parseDelimitedFrom(java.io.InputStream input)1480 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom(java.io.InputStream input) 1481 throws java.io.IOException { 1482 return PARSER.parseDelimitedFrom(input); 1483 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1484 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseDelimitedFrom( 1485 java.io.InputStream input, 1486 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1487 throws java.io.IOException { 1488 return PARSER.parseDelimitedFrom(input, extensionRegistry); 1489 } parseFrom( com.google.protobuf.CodedInputStream input)1490 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( 1491 com.google.protobuf.CodedInputStream input) 1492 throws java.io.IOException { 1493 return PARSER.parseFrom(input); 1494 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1495 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parseFrom( 1496 com.google.protobuf.CodedInputStream input, 1497 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1498 throws java.io.IOException { 1499 return PARSER.parseFrom(input, extensionRegistry); 1500 } 1501 newBuilder()1502 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()1503 public Builder newBuilderForType() { return 
newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype)1504 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column prototype) { 1505 return newBuilder().mergeFrom(prototype); 1506 } toBuilder()1507 public Builder toBuilder() { return newBuilder(this); } 1508 1509 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)1510 protected Builder newBuilderForType( 1511 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 1512 Builder builder = new Builder(parent); 1513 return builder; 1514 } 1515 /** 1516 * Protobuf type {@code Column} 1517 * 1518 * <pre> 1519 ** 1520 * Container for a list of column qualifier names of a family. 1521 * </pre> 1522 */ 1523 public static final class Builder extends 1524 com.google.protobuf.GeneratedMessage.Builder<Builder> 1525 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder { 1526 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()1527 getDescriptor() { 1528 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; 1529 } 1530 1531 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()1532 internalGetFieldAccessorTable() { 1533 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_fieldAccessorTable 1534 .ensureFieldAccessorsInitialized( 1535 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder.class); 1536 } 1537 1538 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.newBuilder() Builder()1539 private Builder() { 1540 maybeForceBuilderInitialization(); 1541 } 1542 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)1543 private Builder( 1544 com.google.protobuf.GeneratedMessage.BuilderParent 
parent) { 1545 super(parent); 1546 maybeForceBuilderInitialization(); 1547 } maybeForceBuilderInitialization()1548 private void maybeForceBuilderInitialization() { 1549 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 1550 } 1551 } create()1552 private static Builder create() { 1553 return new Builder(); 1554 } 1555 clear()1556 public Builder clear() { 1557 super.clear(); 1558 family_ = com.google.protobuf.ByteString.EMPTY; 1559 bitField0_ = (bitField0_ & ~0x00000001); 1560 qualifier_ = java.util.Collections.emptyList(); 1561 bitField0_ = (bitField0_ & ~0x00000002); 1562 return this; 1563 } 1564 clone()1565 public Builder clone() { 1566 return create().mergeFrom(buildPartial()); 1567 } 1568 1569 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()1570 getDescriptorForType() { 1571 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Column_descriptor; 1572 } 1573 getDefaultInstanceForType()1574 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getDefaultInstanceForType() { 1575 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance(); 1576 } 1577 build()1578 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column build() { 1579 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = buildPartial(); 1580 if (!result.isInitialized()) { 1581 throw newUninitializedMessageException(result); 1582 } 1583 return result; 1584 } 1585 buildPartial()1586 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column buildPartial() { 1587 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column(this); 1588 int from_bitField0_ = bitField0_; 1589 int to_bitField0_ = 0; 1590 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 1591 to_bitField0_ |= 0x00000001; 1592 } 1593 result.family_ = family_; 1594 if (((bitField0_ & 0x00000002) == 
0x00000002)) { 1595 qualifier_ = java.util.Collections.unmodifiableList(qualifier_); 1596 bitField0_ = (bitField0_ & ~0x00000002); 1597 } 1598 result.qualifier_ = qualifier_; 1599 result.bitField0_ = to_bitField0_; 1600 onBuilt(); 1601 return result; 1602 } 1603 mergeFrom(com.google.protobuf.Message other)1604 public Builder mergeFrom(com.google.protobuf.Message other) { 1605 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) { 1606 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column)other); 1607 } else { 1608 super.mergeFrom(other); 1609 return this; 1610 } 1611 } 1612 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other)1613 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column other) { 1614 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()) return this; 1615 if (other.hasFamily()) { 1616 setFamily(other.getFamily()); 1617 } 1618 if (!other.qualifier_.isEmpty()) { 1619 if (qualifier_.isEmpty()) { 1620 qualifier_ = other.qualifier_; 1621 bitField0_ = (bitField0_ & ~0x00000002); 1622 } else { 1623 ensureQualifierIsMutable(); 1624 qualifier_.addAll(other.qualifier_); 1625 } 1626 onChanged(); 1627 } 1628 this.mergeUnknownFields(other.getUnknownFields()); 1629 return this; 1630 } 1631 isInitialized()1632 public final boolean isInitialized() { 1633 if (!hasFamily()) { 1634 1635 return false; 1636 } 1637 return true; 1638 } 1639 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1640 public Builder mergeFrom( 1641 com.google.protobuf.CodedInputStream input, 1642 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1643 throws java.io.IOException { 1644 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column parsedMessage = null; 1645 try { 1646 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 1647 } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { 1648 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column) e.getUnfinishedMessage(); 1649 throw e; 1650 } finally { 1651 if (parsedMessage != null) { 1652 mergeFrom(parsedMessage); 1653 } 1654 } 1655 return this; 1656 } 1657 private int bitField0_; 1658 1659 // required bytes family = 1; 1660 private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; 1661 /** 1662 * <code>required bytes family = 1;</code> 1663 */ hasFamily()1664 public boolean hasFamily() { 1665 return ((bitField0_ & 0x00000001) == 0x00000001); 1666 } 1667 /** 1668 * <code>required bytes family = 1;</code> 1669 */ getFamily()1670 public com.google.protobuf.ByteString getFamily() { 1671 return family_; 1672 } 1673 /** 1674 * <code>required bytes family = 1;</code> 1675 */ setFamily(com.google.protobuf.ByteString value)1676 public Builder setFamily(com.google.protobuf.ByteString value) { 1677 if (value == null) { 1678 throw new NullPointerException(); 1679 } 1680 bitField0_ |= 0x00000001; 1681 family_ = value; 1682 onChanged(); 1683 return this; 1684 } 1685 /** 1686 * <code>required bytes family = 1;</code> 1687 */ clearFamily()1688 public Builder clearFamily() { 1689 bitField0_ = (bitField0_ & ~0x00000001); 1690 family_ = getDefaultInstance().getFamily(); 1691 onChanged(); 1692 return this; 1693 } 1694 1695 // repeated bytes qualifier = 2; 1696 private java.util.List<com.google.protobuf.ByteString> qualifier_ = java.util.Collections.emptyList(); ensureQualifierIsMutable()1697 private void ensureQualifierIsMutable() { 1698 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 1699 qualifier_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifier_); 1700 bitField0_ |= 0x00000002; 1701 } 1702 } 1703 /** 1704 * <code>repeated bytes qualifier = 2;</code> 1705 */ 1706 public java.util.List<com.google.protobuf.ByteString> getQualifierList()1707 getQualifierList() { 1708 return 
java.util.Collections.unmodifiableList(qualifier_); 1709 } 1710 /** 1711 * <code>repeated bytes qualifier = 2;</code> 1712 */ getQualifierCount()1713 public int getQualifierCount() { 1714 return qualifier_.size(); 1715 } 1716 /** 1717 * <code>repeated bytes qualifier = 2;</code> 1718 */ getQualifier(int index)1719 public com.google.protobuf.ByteString getQualifier(int index) { 1720 return qualifier_.get(index); 1721 } 1722 /** 1723 * <code>repeated bytes qualifier = 2;</code> 1724 */ setQualifier( int index, com.google.protobuf.ByteString value)1725 public Builder setQualifier( 1726 int index, com.google.protobuf.ByteString value) { 1727 if (value == null) { 1728 throw new NullPointerException(); 1729 } 1730 ensureQualifierIsMutable(); 1731 qualifier_.set(index, value); 1732 onChanged(); 1733 return this; 1734 } 1735 /** 1736 * <code>repeated bytes qualifier = 2;</code> 1737 */ addQualifier(com.google.protobuf.ByteString value)1738 public Builder addQualifier(com.google.protobuf.ByteString value) { 1739 if (value == null) { 1740 throw new NullPointerException(); 1741 } 1742 ensureQualifierIsMutable(); 1743 qualifier_.add(value); 1744 onChanged(); 1745 return this; 1746 } 1747 /** 1748 * <code>repeated bytes qualifier = 2;</code> 1749 */ addAllQualifier( java.lang.Iterable<? extends com.google.protobuf.ByteString> values)1750 public Builder addAllQualifier( 1751 java.lang.Iterable<? 
extends com.google.protobuf.ByteString> values) { 1752 ensureQualifierIsMutable(); 1753 super.addAll(values, qualifier_); 1754 onChanged(); 1755 return this; 1756 } 1757 /** 1758 * <code>repeated bytes qualifier = 2;</code> 1759 */ clearQualifier()1760 public Builder clearQualifier() { 1761 qualifier_ = java.util.Collections.emptyList(); 1762 bitField0_ = (bitField0_ & ~0x00000002); 1763 onChanged(); 1764 return this; 1765 } 1766 1767 // @@protoc_insertion_point(builder_scope:Column) 1768 } 1769 1770 static { 1771 defaultInstance = new Column(true); defaultInstance.initFields()1772 defaultInstance.initFields(); 1773 } 1774 1775 // @@protoc_insertion_point(class_scope:Column) 1776 } 1777 1778 public interface GetOrBuilder 1779 extends com.google.protobuf.MessageOrBuilder { 1780 1781 // required bytes row = 1; 1782 /** 1783 * <code>required bytes row = 1;</code> 1784 */ hasRow()1785 boolean hasRow(); 1786 /** 1787 * <code>required bytes row = 1;</code> 1788 */ getRow()1789 com.google.protobuf.ByteString getRow(); 1790 1791 // repeated .Column column = 2; 1792 /** 1793 * <code>repeated .Column column = 2;</code> 1794 */ 1795 java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList()1796 getColumnList(); 1797 /** 1798 * <code>repeated .Column column = 2;</code> 1799 */ getColumn(int index)1800 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index); 1801 /** 1802 * <code>repeated .Column column = 2;</code> 1803 */ getColumnCount()1804 int getColumnCount(); 1805 /** 1806 * <code>repeated .Column column = 2;</code> 1807 */ 1808 java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList()1809 getColumnOrBuilderList(); 1810 /** 1811 * <code>repeated .Column column = 2;</code> 1812 */ getColumnOrBuilder( int index)1813 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( 1814 int index); 1815 1816 // repeated .NameBytesPair attribute = 3; 1817 /** 1818 * <code>repeated .NameBytesPair attribute = 3;</code> 1819 */ 1820 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList()1821 getAttributeList(); 1822 /** 1823 * <code>repeated .NameBytesPair attribute = 3;</code> 1824 */ getAttribute(int index)1825 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); 1826 /** 1827 * <code>repeated .NameBytesPair attribute = 3;</code> 1828 */ getAttributeCount()1829 int getAttributeCount(); 1830 /** 1831 * <code>repeated .NameBytesPair attribute = 3;</code> 1832 */ 1833 java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()1834 getAttributeOrBuilderList(); 1835 /** 1836 * <code>repeated .NameBytesPair attribute = 3;</code> 1837 */ getAttributeOrBuilder( int index)1838 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 1839 int index); 1840 1841 // optional .Filter filter = 4; 1842 /** 1843 * <code>optional .Filter filter = 4;</code> 1844 */ hasFilter()1845 boolean hasFilter(); 1846 /** 1847 * <code>optional .Filter filter = 4;</code> 1848 */ getFilter()1849 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter(); 1850 /** 1851 * <code>optional .Filter filter = 4;</code> 1852 */ getFilterOrBuilder()1853 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); 1854 1855 // optional .TimeRange time_range = 5; 1856 /** 1857 * <code>optional .TimeRange time_range = 5;</code> 1858 */ hasTimeRange()1859 boolean hasTimeRange(); 1860 /** 1861 * <code>optional .TimeRange time_range = 5;</code> 1862 */ getTimeRange()1863 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); 1864 /** 1865 * <code>optional .TimeRange time_range = 5;</code> 1866 */ getTimeRangeOrBuilder()1867 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); 1868 1869 // optional uint32 max_versions = 6 [default = 1]; 1870 /** 1871 * <code>optional uint32 max_versions = 6 [default = 1];</code> 1872 */ hasMaxVersions()1873 boolean hasMaxVersions(); 1874 /** 1875 * <code>optional uint32 max_versions = 6 [default = 1];</code> 1876 */ getMaxVersions()1877 int getMaxVersions(); 1878 1879 // optional bool cache_blocks = 7 [default = true]; 1880 /** 1881 * <code>optional bool cache_blocks = 7 [default = true];</code> 1882 */ hasCacheBlocks()1883 boolean hasCacheBlocks(); 1884 /** 1885 * <code>optional bool cache_blocks = 7 [default = true];</code> 
1886 */ getCacheBlocks()1887 boolean getCacheBlocks(); 1888 1889 // optional uint32 store_limit = 8; 1890 /** 1891 * <code>optional uint32 store_limit = 8;</code> 1892 */ hasStoreLimit()1893 boolean hasStoreLimit(); 1894 /** 1895 * <code>optional uint32 store_limit = 8;</code> 1896 */ getStoreLimit()1897 int getStoreLimit(); 1898 1899 // optional uint32 store_offset = 9; 1900 /** 1901 * <code>optional uint32 store_offset = 9;</code> 1902 */ hasStoreOffset()1903 boolean hasStoreOffset(); 1904 /** 1905 * <code>optional uint32 store_offset = 9;</code> 1906 */ getStoreOffset()1907 int getStoreOffset(); 1908 1909 // optional bool existence_only = 10 [default = false]; 1910 /** 1911 * <code>optional bool existence_only = 10 [default = false];</code> 1912 * 1913 * <pre> 1914 * The result isn't asked for, just check for 1915 * the existence. 1916 * </pre> 1917 */ hasExistenceOnly()1918 boolean hasExistenceOnly(); 1919 /** 1920 * <code>optional bool existence_only = 10 [default = false];</code> 1921 * 1922 * <pre> 1923 * The result isn't asked for, just check for 1924 * the existence. 1925 * </pre> 1926 */ getExistenceOnly()1927 boolean getExistenceOnly(); 1928 1929 // optional bool closest_row_before = 11 [default = false]; 1930 /** 1931 * <code>optional bool closest_row_before = 11 [default = false];</code> 1932 * 1933 * <pre> 1934 * If the row to get doesn't exist, return the 1935 * closest row before. 1936 * </pre> 1937 */ hasClosestRowBefore()1938 boolean hasClosestRowBefore(); 1939 /** 1940 * <code>optional bool closest_row_before = 11 [default = false];</code> 1941 * 1942 * <pre> 1943 * If the row to get doesn't exist, return the 1944 * closest row before. 
1945 * </pre> 1946 */ getClosestRowBefore()1947 boolean getClosestRowBefore(); 1948 1949 // optional .Consistency consistency = 12 [default = STRONG]; 1950 /** 1951 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 1952 */ hasConsistency()1953 boolean hasConsistency(); 1954 /** 1955 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 1956 */ getConsistency()1957 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency(); 1958 1959 // repeated .ColumnFamilyTimeRange cf_time_range = 13; 1960 /** 1961 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 1962 */ 1963 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList()1964 getCfTimeRangeList(); 1965 /** 1966 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 1967 */ getCfTimeRange(int index)1968 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index); 1969 /** 1970 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 1971 */ getCfTimeRangeCount()1972 int getCfTimeRangeCount(); 1973 /** 1974 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 1975 */ 1976 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList()1977 getCfTimeRangeOrBuilderList(); 1978 /** 1979 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 1980 */ getCfTimeRangeOrBuilder( int index)1981 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( 1982 int index); 1983 } 1984 /** 1985 * Protobuf type {@code Get} 1986 * 1987 * <pre> 1988 ** 1989 * The protocol buffer version of Get. 
1990 * Unless existence_only is specified, return all the requested data 1991 * for the row that matches exactly, or the one that immediately 1992 * precedes it if closest_row_before is specified. 1993 * </pre> 1994 */ 1995 public static final class Get extends 1996 com.google.protobuf.GeneratedMessage 1997 implements GetOrBuilder { 1998 // Use Get.newBuilder() to construct. Get(com.google.protobuf.GeneratedMessage.Builder<?> builder)1999 private Get(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 2000 super(builder); 2001 this.unknownFields = builder.getUnknownFields(); 2002 } Get(boolean noInit)2003 private Get(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 2004 2005 private static final Get defaultInstance; getDefaultInstance()2006 public static Get getDefaultInstance() { 2007 return defaultInstance; 2008 } 2009 getDefaultInstanceForType()2010 public Get getDefaultInstanceForType() { 2011 return defaultInstance; 2012 } 2013 2014 private final com.google.protobuf.UnknownFieldSet unknownFields; 2015 @java.lang.Override 2016 public final com.google.protobuf.UnknownFieldSet getUnknownFields()2017 getUnknownFields() { 2018 return this.unknownFields; 2019 } Get( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2020 private Get( 2021 com.google.protobuf.CodedInputStream input, 2022 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2023 throws com.google.protobuf.InvalidProtocolBufferException { 2024 initFields(); 2025 int mutable_bitField0_ = 0; 2026 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 2027 com.google.protobuf.UnknownFieldSet.newBuilder(); 2028 try { 2029 boolean done = false; 2030 while (!done) { 2031 int tag = input.readTag(); 2032 switch (tag) { 2033 case 0: 2034 done = true; 2035 break; 2036 default: { 2037 if (!parseUnknownField(input, unknownFields, 2038 extensionRegistry, tag)) { 2039 done = true; 2040 } 2041 
break; 2042 } 2043 case 10: { 2044 bitField0_ |= 0x00000001; 2045 row_ = input.readBytes(); 2046 break; 2047 } 2048 case 18: { 2049 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 2050 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(); 2051 mutable_bitField0_ |= 0x00000002; 2052 } 2053 column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry)); 2054 break; 2055 } 2056 case 26: { 2057 if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 2058 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(); 2059 mutable_bitField0_ |= 0x00000004; 2060 } 2061 attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); 2062 break; 2063 } 2064 case 34: { 2065 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; 2066 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2067 subBuilder = filter_.toBuilder(); 2068 } 2069 filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); 2070 if (subBuilder != null) { 2071 subBuilder.mergeFrom(filter_); 2072 filter_ = subBuilder.buildPartial(); 2073 } 2074 bitField0_ |= 0x00000002; 2075 break; 2076 } 2077 case 42: { 2078 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; 2079 if (((bitField0_ & 0x00000004) == 0x00000004)) { 2080 subBuilder = timeRange_.toBuilder(); 2081 } 2082 timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); 2083 if (subBuilder != null) { 2084 subBuilder.mergeFrom(timeRange_); 2085 timeRange_ = subBuilder.buildPartial(); 2086 } 2087 bitField0_ |= 0x00000004; 2088 break; 2089 } 2090 case 48: { 2091 bitField0_ |= 0x00000008; 2092 maxVersions_ = input.readUInt32(); 2093 break; 
2094 } 2095 case 56: { 2096 bitField0_ |= 0x00000010; 2097 cacheBlocks_ = input.readBool(); 2098 break; 2099 } 2100 case 64: { 2101 bitField0_ |= 0x00000020; 2102 storeLimit_ = input.readUInt32(); 2103 break; 2104 } 2105 case 72: { 2106 bitField0_ |= 0x00000040; 2107 storeOffset_ = input.readUInt32(); 2108 break; 2109 } 2110 case 80: { 2111 bitField0_ |= 0x00000080; 2112 existenceOnly_ = input.readBool(); 2113 break; 2114 } 2115 case 88: { 2116 bitField0_ |= 0x00000100; 2117 closestRowBefore_ = input.readBool(); 2118 break; 2119 } 2120 case 96: { 2121 int rawValue = input.readEnum(); 2122 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue); 2123 if (value == null) { 2124 unknownFields.mergeVarintField(12, rawValue); 2125 } else { 2126 bitField0_ |= 0x00000200; 2127 consistency_ = value; 2128 } 2129 break; 2130 } 2131 case 106: { 2132 if (!((mutable_bitField0_ & 0x00001000) == 0x00001000)) { 2133 cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(); 2134 mutable_bitField0_ |= 0x00001000; 2135 } 2136 cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry)); 2137 break; 2138 } 2139 } 2140 } 2141 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2142 throw e.setUnfinishedMessage(this); 2143 } catch (java.io.IOException e) { 2144 throw new com.google.protobuf.InvalidProtocolBufferException( 2145 e.getMessage()).setUnfinishedMessage(this); 2146 } finally { 2147 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 2148 column_ = java.util.Collections.unmodifiableList(column_); 2149 } 2150 if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 2151 attribute_ = java.util.Collections.unmodifiableList(attribute_); 2152 } 2153 if (((mutable_bitField0_ & 0x00001000) == 0x00001000)) { 2154 cfTimeRange_ 
= java.util.Collections.unmodifiableList(cfTimeRange_); 2155 } 2156 this.unknownFields = unknownFields.build(); 2157 makeExtensionsImmutable(); 2158 } 2159 } 2160 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()2161 getDescriptor() { 2162 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; 2163 } 2164 2165 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()2166 internalGetFieldAccessorTable() { 2167 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable 2168 .ensureFieldAccessorsInitialized( 2169 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); 2170 } 2171 2172 public static com.google.protobuf.Parser<Get> PARSER = 2173 new com.google.protobuf.AbstractParser<Get>() { 2174 public Get parsePartialFrom( 2175 com.google.protobuf.CodedInputStream input, 2176 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2177 throws com.google.protobuf.InvalidProtocolBufferException { 2178 return new Get(input, extensionRegistry); 2179 } 2180 }; 2181 2182 @java.lang.Override getParserForType()2183 public com.google.protobuf.Parser<Get> getParserForType() { 2184 return PARSER; 2185 } 2186 2187 private int bitField0_; 2188 // required bytes row = 1; 2189 public static final int ROW_FIELD_NUMBER = 1; 2190 private com.google.protobuf.ByteString row_; 2191 /** 2192 * <code>required bytes row = 1;</code> 2193 */ hasRow()2194 public boolean hasRow() { 2195 return ((bitField0_ & 0x00000001) == 0x00000001); 2196 } 2197 /** 2198 * <code>required bytes row = 1;</code> 2199 */ getRow()2200 public com.google.protobuf.ByteString getRow() { 2201 return row_; 2202 } 2203 2204 // repeated .Column column = 2; 2205 public static final int COLUMN_FIELD_NUMBER = 2; 2206 private 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_; 2207 /** 2208 * <code>repeated .Column column = 2;</code> 2209 */ getColumnList()2210 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() { 2211 return column_; 2212 } 2213 /** 2214 * <code>repeated .Column column = 2;</code> 2215 */ 2216 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList()2217 getColumnOrBuilderList() { 2218 return column_; 2219 } 2220 /** 2221 * <code>repeated .Column column = 2;</code> 2222 */ getColumnCount()2223 public int getColumnCount() { 2224 return column_.size(); 2225 } 2226 /** 2227 * <code>repeated .Column column = 2;</code> 2228 */ getColumn(int index)2229 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { 2230 return column_.get(index); 2231 } 2232 /** 2233 * <code>repeated .Column column = 2;</code> 2234 */ getColumnOrBuilder( int index)2235 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( 2236 int index) { 2237 return column_.get(index); 2238 } 2239 2240 // repeated .NameBytesPair attribute = 3; 2241 public static final int ATTRIBUTE_FIELD_NUMBER = 3; 2242 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_; 2243 /** 2244 * <code>repeated .NameBytesPair attribute = 3;</code> 2245 */ getAttributeList()2246 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { 2247 return attribute_; 2248 } 2249 /** 2250 * <code>repeated .NameBytesPair attribute = 3;</code> 2251 */ 2252 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()2253 getAttributeOrBuilderList() { 2254 return attribute_; 2255 } 2256 /** 2257 * <code>repeated .NameBytesPair attribute = 3;</code> 2258 */ getAttributeCount()2259 public int getAttributeCount() { 2260 return attribute_.size(); 2261 } 2262 /** 2263 * <code>repeated .NameBytesPair attribute = 3;</code> 2264 */ getAttribute(int index)2265 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { 2266 return attribute_.get(index); 2267 } 2268 /** 2269 * <code>repeated .NameBytesPair attribute = 3;</code> 2270 */ getAttributeOrBuilder( int index)2271 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 2272 int index) { 2273 return attribute_.get(index); 2274 } 2275 2276 // optional .Filter filter = 4; 2277 public static final int FILTER_FIELD_NUMBER = 4; 2278 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_; 2279 /** 2280 * <code>optional .Filter filter = 4;</code> 2281 */ hasFilter()2282 public boolean hasFilter() { 2283 return ((bitField0_ & 0x00000002) == 0x00000002); 2284 } 2285 /** 2286 * <code>optional .Filter filter = 4;</code> 2287 */ getFilter()2288 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { 2289 return filter_; 2290 } 2291 /** 2292 * <code>optional .Filter filter = 4;</code> 2293 */ getFilterOrBuilder()2294 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { 2295 return filter_; 2296 } 2297 2298 // optional .TimeRange time_range = 5; 2299 public static final int TIME_RANGE_FIELD_NUMBER = 5; 2300 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; 2301 /** 2302 * <code>optional .TimeRange time_range = 5;</code> 2303 */ hasTimeRange()2304 public boolean hasTimeRange() { 2305 return ((bitField0_ & 
0x00000004) == 0x00000004); 2306 } 2307 /** 2308 * <code>optional .TimeRange time_range = 5;</code> 2309 */ getTimeRange()2310 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 2311 return timeRange_; 2312 } 2313 /** 2314 * <code>optional .TimeRange time_range = 5;</code> 2315 */ getTimeRangeOrBuilder()2316 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 2317 return timeRange_; 2318 } 2319 2320 // optional uint32 max_versions = 6 [default = 1]; 2321 public static final int MAX_VERSIONS_FIELD_NUMBER = 6; 2322 private int maxVersions_; 2323 /** 2324 * <code>optional uint32 max_versions = 6 [default = 1];</code> 2325 */ hasMaxVersions()2326 public boolean hasMaxVersions() { 2327 return ((bitField0_ & 0x00000008) == 0x00000008); 2328 } 2329 /** 2330 * <code>optional uint32 max_versions = 6 [default = 1];</code> 2331 */ getMaxVersions()2332 public int getMaxVersions() { 2333 return maxVersions_; 2334 } 2335 2336 // optional bool cache_blocks = 7 [default = true]; 2337 public static final int CACHE_BLOCKS_FIELD_NUMBER = 7; 2338 private boolean cacheBlocks_; 2339 /** 2340 * <code>optional bool cache_blocks = 7 [default = true];</code> 2341 */ hasCacheBlocks()2342 public boolean hasCacheBlocks() { 2343 return ((bitField0_ & 0x00000010) == 0x00000010); 2344 } 2345 /** 2346 * <code>optional bool cache_blocks = 7 [default = true];</code> 2347 */ getCacheBlocks()2348 public boolean getCacheBlocks() { 2349 return cacheBlocks_; 2350 } 2351 2352 // optional uint32 store_limit = 8; 2353 public static final int STORE_LIMIT_FIELD_NUMBER = 8; 2354 private int storeLimit_; 2355 /** 2356 * <code>optional uint32 store_limit = 8;</code> 2357 */ hasStoreLimit()2358 public boolean hasStoreLimit() { 2359 return ((bitField0_ & 0x00000020) == 0x00000020); 2360 } 2361 /** 2362 * <code>optional uint32 store_limit = 8;</code> 2363 */ getStoreLimit()2364 public int getStoreLimit() { 2365 return 
storeLimit_; 2366 } 2367 2368 // optional uint32 store_offset = 9; 2369 public static final int STORE_OFFSET_FIELD_NUMBER = 9; 2370 private int storeOffset_; 2371 /** 2372 * <code>optional uint32 store_offset = 9;</code> 2373 */ hasStoreOffset()2374 public boolean hasStoreOffset() { 2375 return ((bitField0_ & 0x00000040) == 0x00000040); 2376 } 2377 /** 2378 * <code>optional uint32 store_offset = 9;</code> 2379 */ getStoreOffset()2380 public int getStoreOffset() { 2381 return storeOffset_; 2382 } 2383 2384 // optional bool existence_only = 10 [default = false]; 2385 public static final int EXISTENCE_ONLY_FIELD_NUMBER = 10; 2386 private boolean existenceOnly_; 2387 /** 2388 * <code>optional bool existence_only = 10 [default = false];</code> 2389 * 2390 * <pre> 2391 * The result isn't asked for, just check for 2392 * the existence. 2393 * </pre> 2394 */ hasExistenceOnly()2395 public boolean hasExistenceOnly() { 2396 return ((bitField0_ & 0x00000080) == 0x00000080); 2397 } 2398 /** 2399 * <code>optional bool existence_only = 10 [default = false];</code> 2400 * 2401 * <pre> 2402 * The result isn't asked for, just check for 2403 * the existence. 2404 * </pre> 2405 */ getExistenceOnly()2406 public boolean getExistenceOnly() { 2407 return existenceOnly_; 2408 } 2409 2410 // optional bool closest_row_before = 11 [default = false]; 2411 public static final int CLOSEST_ROW_BEFORE_FIELD_NUMBER = 11; 2412 private boolean closestRowBefore_; 2413 /** 2414 * <code>optional bool closest_row_before = 11 [default = false];</code> 2415 * 2416 * <pre> 2417 * If the row to get doesn't exist, return the 2418 * closest row before. 2419 * </pre> 2420 */ hasClosestRowBefore()2421 public boolean hasClosestRowBefore() { 2422 return ((bitField0_ & 0x00000100) == 0x00000100); 2423 } 2424 /** 2425 * <code>optional bool closest_row_before = 11 [default = false];</code> 2426 * 2427 * <pre> 2428 * If the row to get doesn't exist, return the 2429 * closest row before. 
2430 * </pre> 2431 */ getClosestRowBefore()2432 public boolean getClosestRowBefore() { 2433 return closestRowBefore_; 2434 } 2435 2436 // optional .Consistency consistency = 12 [default = STRONG]; 2437 public static final int CONSISTENCY_FIELD_NUMBER = 12; 2438 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_; 2439 /** 2440 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 2441 */ hasConsistency()2442 public boolean hasConsistency() { 2443 return ((bitField0_ & 0x00000200) == 0x00000200); 2444 } 2445 /** 2446 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 2447 */ getConsistency()2448 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() { 2449 return consistency_; 2450 } 2451 2452 // repeated .ColumnFamilyTimeRange cf_time_range = 13; 2453 public static final int CF_TIME_RANGE_FIELD_NUMBER = 13; 2454 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_; 2455 /** 2456 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 2457 */ getCfTimeRangeList()2458 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { 2459 return cfTimeRange_; 2460 } 2461 /** 2462 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 2463 */ 2464 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList()2465 getCfTimeRangeOrBuilderList() { 2466 return cfTimeRange_; 2467 } 2468 /** 2469 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 2470 */ getCfTimeRangeCount()2471 public int getCfTimeRangeCount() { 2472 return cfTimeRange_.size(); 2473 } 2474 /** 2475 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 2476 */ getCfTimeRange(int index)2477 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { 2478 return cfTimeRange_.get(index); 2479 } 2480 /** 2481 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 2482 */ getCfTimeRangeOrBuilder( int index)2483 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( 2484 int index) { 2485 return cfTimeRange_.get(index); 2486 } 2487 initFields()2488 private void initFields() { 2489 row_ = com.google.protobuf.ByteString.EMPTY; 2490 column_ = java.util.Collections.emptyList(); 2491 attribute_ = java.util.Collections.emptyList(); 2492 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 2493 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 2494 maxVersions_ = 1; 2495 cacheBlocks_ = true; 2496 storeLimit_ = 0; 2497 storeOffset_ = 0; 2498 existenceOnly_ = false; 2499 closestRowBefore_ = false; 2500 consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 2501 cfTimeRange_ = java.util.Collections.emptyList(); 2502 } 2503 private byte memoizedIsInitialized = -1; isInitialized()2504 public final boolean isInitialized() { 2505 byte isInitialized = memoizedIsInitialized; 2506 if (isInitialized != -1) return isInitialized == 1; 2507 2508 if (!hasRow()) { 2509 memoizedIsInitialized = 0; 2510 return false; 2511 } 2512 for (int 
i = 0; i < getColumnCount(); i++) { 2513 if (!getColumn(i).isInitialized()) { 2514 memoizedIsInitialized = 0; 2515 return false; 2516 } 2517 } 2518 for (int i = 0; i < getAttributeCount(); i++) { 2519 if (!getAttribute(i).isInitialized()) { 2520 memoizedIsInitialized = 0; 2521 return false; 2522 } 2523 } 2524 if (hasFilter()) { 2525 if (!getFilter().isInitialized()) { 2526 memoizedIsInitialized = 0; 2527 return false; 2528 } 2529 } 2530 for (int i = 0; i < getCfTimeRangeCount(); i++) { 2531 if (!getCfTimeRange(i).isInitialized()) { 2532 memoizedIsInitialized = 0; 2533 return false; 2534 } 2535 } 2536 memoizedIsInitialized = 1; 2537 return true; 2538 } 2539 writeTo(com.google.protobuf.CodedOutputStream output)2540 public void writeTo(com.google.protobuf.CodedOutputStream output) 2541 throws java.io.IOException { 2542 getSerializedSize(); 2543 if (((bitField0_ & 0x00000001) == 0x00000001)) { 2544 output.writeBytes(1, row_); 2545 } 2546 for (int i = 0; i < column_.size(); i++) { 2547 output.writeMessage(2, column_.get(i)); 2548 } 2549 for (int i = 0; i < attribute_.size(); i++) { 2550 output.writeMessage(3, attribute_.get(i)); 2551 } 2552 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2553 output.writeMessage(4, filter_); 2554 } 2555 if (((bitField0_ & 0x00000004) == 0x00000004)) { 2556 output.writeMessage(5, timeRange_); 2557 } 2558 if (((bitField0_ & 0x00000008) == 0x00000008)) { 2559 output.writeUInt32(6, maxVersions_); 2560 } 2561 if (((bitField0_ & 0x00000010) == 0x00000010)) { 2562 output.writeBool(7, cacheBlocks_); 2563 } 2564 if (((bitField0_ & 0x00000020) == 0x00000020)) { 2565 output.writeUInt32(8, storeLimit_); 2566 } 2567 if (((bitField0_ & 0x00000040) == 0x00000040)) { 2568 output.writeUInt32(9, storeOffset_); 2569 } 2570 if (((bitField0_ & 0x00000080) == 0x00000080)) { 2571 output.writeBool(10, existenceOnly_); 2572 } 2573 if (((bitField0_ & 0x00000100) == 0x00000100)) { 2574 output.writeBool(11, closestRowBefore_); 2575 } 2576 if (((bitField0_ & 
0x00000200) == 0x00000200)) { 2577 output.writeEnum(12, consistency_.getNumber()); 2578 } 2579 for (int i = 0; i < cfTimeRange_.size(); i++) { 2580 output.writeMessage(13, cfTimeRange_.get(i)); 2581 } 2582 getUnknownFields().writeTo(output); 2583 } 2584 2585 private int memoizedSerializedSize = -1; getSerializedSize()2586 public int getSerializedSize() { 2587 int size = memoizedSerializedSize; 2588 if (size != -1) return size; 2589 2590 size = 0; 2591 if (((bitField0_ & 0x00000001) == 0x00000001)) { 2592 size += com.google.protobuf.CodedOutputStream 2593 .computeBytesSize(1, row_); 2594 } 2595 for (int i = 0; i < column_.size(); i++) { 2596 size += com.google.protobuf.CodedOutputStream 2597 .computeMessageSize(2, column_.get(i)); 2598 } 2599 for (int i = 0; i < attribute_.size(); i++) { 2600 size += com.google.protobuf.CodedOutputStream 2601 .computeMessageSize(3, attribute_.get(i)); 2602 } 2603 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2604 size += com.google.protobuf.CodedOutputStream 2605 .computeMessageSize(4, filter_); 2606 } 2607 if (((bitField0_ & 0x00000004) == 0x00000004)) { 2608 size += com.google.protobuf.CodedOutputStream 2609 .computeMessageSize(5, timeRange_); 2610 } 2611 if (((bitField0_ & 0x00000008) == 0x00000008)) { 2612 size += com.google.protobuf.CodedOutputStream 2613 .computeUInt32Size(6, maxVersions_); 2614 } 2615 if (((bitField0_ & 0x00000010) == 0x00000010)) { 2616 size += com.google.protobuf.CodedOutputStream 2617 .computeBoolSize(7, cacheBlocks_); 2618 } 2619 if (((bitField0_ & 0x00000020) == 0x00000020)) { 2620 size += com.google.protobuf.CodedOutputStream 2621 .computeUInt32Size(8, storeLimit_); 2622 } 2623 if (((bitField0_ & 0x00000040) == 0x00000040)) { 2624 size += com.google.protobuf.CodedOutputStream 2625 .computeUInt32Size(9, storeOffset_); 2626 } 2627 if (((bitField0_ & 0x00000080) == 0x00000080)) { 2628 size += com.google.protobuf.CodedOutputStream 2629 .computeBoolSize(10, existenceOnly_); 2630 } 2631 if (((bitField0_ & 
0x00000100) == 0x00000100)) { 2632 size += com.google.protobuf.CodedOutputStream 2633 .computeBoolSize(11, closestRowBefore_); 2634 } 2635 if (((bitField0_ & 0x00000200) == 0x00000200)) { 2636 size += com.google.protobuf.CodedOutputStream 2637 .computeEnumSize(12, consistency_.getNumber()); 2638 } 2639 for (int i = 0; i < cfTimeRange_.size(); i++) { 2640 size += com.google.protobuf.CodedOutputStream 2641 .computeMessageSize(13, cfTimeRange_.get(i)); 2642 } 2643 size += getUnknownFields().getSerializedSize(); 2644 memoizedSerializedSize = size; 2645 return size; 2646 } 2647 2648 private static final long serialVersionUID = 0L; 2649 @java.lang.Override writeReplace()2650 protected java.lang.Object writeReplace() 2651 throws java.io.ObjectStreamException { 2652 return super.writeReplace(); 2653 } 2654 2655 @java.lang.Override equals(final java.lang.Object obj)2656 public boolean equals(final java.lang.Object obj) { 2657 if (obj == this) { 2658 return true; 2659 } 2660 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)) { 2661 return super.equals(obj); 2662 } 2663 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) obj; 2664 2665 boolean result = true; 2666 result = result && (hasRow() == other.hasRow()); 2667 if (hasRow()) { 2668 result = result && getRow() 2669 .equals(other.getRow()); 2670 } 2671 result = result && getColumnList() 2672 .equals(other.getColumnList()); 2673 result = result && getAttributeList() 2674 .equals(other.getAttributeList()); 2675 result = result && (hasFilter() == other.hasFilter()); 2676 if (hasFilter()) { 2677 result = result && getFilter() 2678 .equals(other.getFilter()); 2679 } 2680 result = result && (hasTimeRange() == other.hasTimeRange()); 2681 if (hasTimeRange()) { 2682 result = result && getTimeRange() 2683 .equals(other.getTimeRange()); 2684 } 2685 result = result && (hasMaxVersions() == other.hasMaxVersions()); 2686 if 
(hasMaxVersions()) { 2687 result = result && (getMaxVersions() 2688 == other.getMaxVersions()); 2689 } 2690 result = result && (hasCacheBlocks() == other.hasCacheBlocks()); 2691 if (hasCacheBlocks()) { 2692 result = result && (getCacheBlocks() 2693 == other.getCacheBlocks()); 2694 } 2695 result = result && (hasStoreLimit() == other.hasStoreLimit()); 2696 if (hasStoreLimit()) { 2697 result = result && (getStoreLimit() 2698 == other.getStoreLimit()); 2699 } 2700 result = result && (hasStoreOffset() == other.hasStoreOffset()); 2701 if (hasStoreOffset()) { 2702 result = result && (getStoreOffset() 2703 == other.getStoreOffset()); 2704 } 2705 result = result && (hasExistenceOnly() == other.hasExistenceOnly()); 2706 if (hasExistenceOnly()) { 2707 result = result && (getExistenceOnly() 2708 == other.getExistenceOnly()); 2709 } 2710 result = result && (hasClosestRowBefore() == other.hasClosestRowBefore()); 2711 if (hasClosestRowBefore()) { 2712 result = result && (getClosestRowBefore() 2713 == other.getClosestRowBefore()); 2714 } 2715 result = result && (hasConsistency() == other.hasConsistency()); 2716 if (hasConsistency()) { 2717 result = result && 2718 (getConsistency() == other.getConsistency()); 2719 } 2720 result = result && getCfTimeRangeList() 2721 .equals(other.getCfTimeRangeList()); 2722 result = result && 2723 getUnknownFields().equals(other.getUnknownFields()); 2724 return result; 2725 } 2726 2727 private int memoizedHashCode = 0; 2728 @java.lang.Override hashCode()2729 public int hashCode() { 2730 if (memoizedHashCode != 0) { 2731 return memoizedHashCode; 2732 } 2733 int hash = 41; 2734 hash = (19 * hash) + getDescriptorForType().hashCode(); 2735 if (hasRow()) { 2736 hash = (37 * hash) + ROW_FIELD_NUMBER; 2737 hash = (53 * hash) + getRow().hashCode(); 2738 } 2739 if (getColumnCount() > 0) { 2740 hash = (37 * hash) + COLUMN_FIELD_NUMBER; 2741 hash = (53 * hash) + getColumnList().hashCode(); 2742 } 2743 if (getAttributeCount() > 0) { 2744 hash = (37 * hash) + 
ATTRIBUTE_FIELD_NUMBER; 2745 hash = (53 * hash) + getAttributeList().hashCode(); 2746 } 2747 if (hasFilter()) { 2748 hash = (37 * hash) + FILTER_FIELD_NUMBER; 2749 hash = (53 * hash) + getFilter().hashCode(); 2750 } 2751 if (hasTimeRange()) { 2752 hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; 2753 hash = (53 * hash) + getTimeRange().hashCode(); 2754 } 2755 if (hasMaxVersions()) { 2756 hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER; 2757 hash = (53 * hash) + getMaxVersions(); 2758 } 2759 if (hasCacheBlocks()) { 2760 hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER; 2761 hash = (53 * hash) + hashBoolean(getCacheBlocks()); 2762 } 2763 if (hasStoreLimit()) { 2764 hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER; 2765 hash = (53 * hash) + getStoreLimit(); 2766 } 2767 if (hasStoreOffset()) { 2768 hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER; 2769 hash = (53 * hash) + getStoreOffset(); 2770 } 2771 if (hasExistenceOnly()) { 2772 hash = (37 * hash) + EXISTENCE_ONLY_FIELD_NUMBER; 2773 hash = (53 * hash) + hashBoolean(getExistenceOnly()); 2774 } 2775 if (hasClosestRowBefore()) { 2776 hash = (37 * hash) + CLOSEST_ROW_BEFORE_FIELD_NUMBER; 2777 hash = (53 * hash) + hashBoolean(getClosestRowBefore()); 2778 } 2779 if (hasConsistency()) { 2780 hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER; 2781 hash = (53 * hash) + hashEnum(getConsistency()); 2782 } 2783 if (getCfTimeRangeCount() > 0) { 2784 hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER; 2785 hash = (53 * hash) + getCfTimeRangeList().hashCode(); 2786 } 2787 hash = (29 * hash) + getUnknownFields().hashCode(); 2788 memoizedHashCode = hash; 2789 return hash; 2790 } 2791 parseFrom( com.google.protobuf.ByteString data)2792 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( 2793 com.google.protobuf.ByteString data) 2794 throws com.google.protobuf.InvalidProtocolBufferException { 2795 return PARSER.parseFrom(data); 2796 } parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)2797 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( 2798 com.google.protobuf.ByteString data, 2799 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2800 throws com.google.protobuf.InvalidProtocolBufferException { 2801 return PARSER.parseFrom(data, extensionRegistry); 2802 } parseFrom(byte[] data)2803 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(byte[] data) 2804 throws com.google.protobuf.InvalidProtocolBufferException { 2805 return PARSER.parseFrom(data); 2806 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2807 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( 2808 byte[] data, 2809 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2810 throws com.google.protobuf.InvalidProtocolBufferException { 2811 return PARSER.parseFrom(data, extensionRegistry); 2812 } parseFrom(java.io.InputStream input)2813 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom(java.io.InputStream input) 2814 throws java.io.IOException { 2815 return PARSER.parseFrom(input); 2816 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2817 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( 2818 java.io.InputStream input, 2819 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2820 throws java.io.IOException { 2821 return PARSER.parseFrom(input, extensionRegistry); 2822 } parseDelimitedFrom(java.io.InputStream input)2823 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom(java.io.InputStream input) 2824 throws java.io.IOException { 2825 return PARSER.parseDelimitedFrom(input); 2826 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2827 public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseDelimitedFrom( 2828 java.io.InputStream input, 2829 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2830 throws java.io.IOException { 2831 return PARSER.parseDelimitedFrom(input, extensionRegistry); 2832 } parseFrom( com.google.protobuf.CodedInputStream input)2833 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( 2834 com.google.protobuf.CodedInputStream input) 2835 throws java.io.IOException { 2836 return PARSER.parseFrom(input); 2837 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2838 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parseFrom( 2839 com.google.protobuf.CodedInputStream input, 2840 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2841 throws java.io.IOException { 2842 return PARSER.parseFrom(input, extensionRegistry); 2843 } 2844 newBuilder()2845 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()2846 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype)2847 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get prototype) { 2848 return newBuilder().mergeFrom(prototype); 2849 } toBuilder()2850 public Builder toBuilder() { return newBuilder(this); } 2851 2852 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)2853 protected Builder newBuilderForType( 2854 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 2855 Builder builder = new Builder(parent); 2856 return builder; 2857 } 2858 /** 2859 * Protobuf type {@code Get} 2860 * 2861 * <pre> 2862 ** 2863 * The protocol buffer version of Get. 
2864 * Unless existence_only is specified, return all the requested data 2865 * for the row that matches exactly, or the one that immediately 2866 * precedes it if closest_row_before is specified. 2867 * </pre> 2868 */ 2869 public static final class Builder extends 2870 com.google.protobuf.GeneratedMessage.Builder<Builder> 2871 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder { 2872 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()2873 getDescriptor() { 2874 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; 2875 } 2876 2877 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()2878 internalGetFieldAccessorTable() { 2879 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_fieldAccessorTable 2880 .ensureFieldAccessorsInitialized( 2881 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder.class); 2882 } 2883 2884 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder() Builder()2885 private Builder() { 2886 maybeForceBuilderInitialization(); 2887 } 2888 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)2889 private Builder( 2890 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 2891 super(parent); 2892 maybeForceBuilderInitialization(); 2893 } maybeForceBuilderInitialization()2894 private void maybeForceBuilderInitialization() { 2895 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 2896 getColumnFieldBuilder(); 2897 getAttributeFieldBuilder(); 2898 getFilterFieldBuilder(); 2899 getTimeRangeFieldBuilder(); 2900 getCfTimeRangeFieldBuilder(); 2901 } 2902 } create()2903 private static Builder create() { 2904 return new Builder(); 2905 } 2906 clear()2907 public Builder clear() { 2908 super.clear(); 2909 row_ = 
com.google.protobuf.ByteString.EMPTY; 2910 bitField0_ = (bitField0_ & ~0x00000001); 2911 if (columnBuilder_ == null) { 2912 column_ = java.util.Collections.emptyList(); 2913 bitField0_ = (bitField0_ & ~0x00000002); 2914 } else { 2915 columnBuilder_.clear(); 2916 } 2917 if (attributeBuilder_ == null) { 2918 attribute_ = java.util.Collections.emptyList(); 2919 bitField0_ = (bitField0_ & ~0x00000004); 2920 } else { 2921 attributeBuilder_.clear(); 2922 } 2923 if (filterBuilder_ == null) { 2924 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 2925 } else { 2926 filterBuilder_.clear(); 2927 } 2928 bitField0_ = (bitField0_ & ~0x00000008); 2929 if (timeRangeBuilder_ == null) { 2930 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 2931 } else { 2932 timeRangeBuilder_.clear(); 2933 } 2934 bitField0_ = (bitField0_ & ~0x00000010); 2935 maxVersions_ = 1; 2936 bitField0_ = (bitField0_ & ~0x00000020); 2937 cacheBlocks_ = true; 2938 bitField0_ = (bitField0_ & ~0x00000040); 2939 storeLimit_ = 0; 2940 bitField0_ = (bitField0_ & ~0x00000080); 2941 storeOffset_ = 0; 2942 bitField0_ = (bitField0_ & ~0x00000100); 2943 existenceOnly_ = false; 2944 bitField0_ = (bitField0_ & ~0x00000200); 2945 closestRowBefore_ = false; 2946 bitField0_ = (bitField0_ & ~0x00000400); 2947 consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 2948 bitField0_ = (bitField0_ & ~0x00000800); 2949 if (cfTimeRangeBuilder_ == null) { 2950 cfTimeRange_ = java.util.Collections.emptyList(); 2951 bitField0_ = (bitField0_ & ~0x00001000); 2952 } else { 2953 cfTimeRangeBuilder_.clear(); 2954 } 2955 return this; 2956 } 2957 clone()2958 public Builder clone() { 2959 return create().mergeFrom(buildPartial()); 2960 } 2961 2962 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()2963 getDescriptorForType() { 2964 return 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Get_descriptor; 2965 } 2966 getDefaultInstanceForType()2967 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getDefaultInstanceForType() { 2968 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance(); 2969 } 2970 build()2971 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get build() { 2972 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = buildPartial(); 2973 if (!result.isInitialized()) { 2974 throw newUninitializedMessageException(result); 2975 } 2976 return result; 2977 } 2978 buildPartial()2979 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get buildPartial() { 2980 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get(this); 2981 int from_bitField0_ = bitField0_; 2982 int to_bitField0_ = 0; 2983 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 2984 to_bitField0_ |= 0x00000001; 2985 } 2986 result.row_ = row_; 2987 if (columnBuilder_ == null) { 2988 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2989 column_ = java.util.Collections.unmodifiableList(column_); 2990 bitField0_ = (bitField0_ & ~0x00000002); 2991 } 2992 result.column_ = column_; 2993 } else { 2994 result.column_ = columnBuilder_.build(); 2995 } 2996 if (attributeBuilder_ == null) { 2997 if (((bitField0_ & 0x00000004) == 0x00000004)) { 2998 attribute_ = java.util.Collections.unmodifiableList(attribute_); 2999 bitField0_ = (bitField0_ & ~0x00000004); 3000 } 3001 result.attribute_ = attribute_; 3002 } else { 3003 result.attribute_ = attributeBuilder_.build(); 3004 } 3005 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 3006 to_bitField0_ |= 0x00000002; 3007 } 3008 if (filterBuilder_ == null) { 3009 result.filter_ = filter_; 3010 } else { 3011 result.filter_ = filterBuilder_.build(); 3012 } 3013 if (((from_bitField0_ & 0x00000010) == 
0x00000010)) { 3014 to_bitField0_ |= 0x00000004; 3015 } 3016 if (timeRangeBuilder_ == null) { 3017 result.timeRange_ = timeRange_; 3018 } else { 3019 result.timeRange_ = timeRangeBuilder_.build(); 3020 } 3021 if (((from_bitField0_ & 0x00000020) == 0x00000020)) { 3022 to_bitField0_ |= 0x00000008; 3023 } 3024 result.maxVersions_ = maxVersions_; 3025 if (((from_bitField0_ & 0x00000040) == 0x00000040)) { 3026 to_bitField0_ |= 0x00000010; 3027 } 3028 result.cacheBlocks_ = cacheBlocks_; 3029 if (((from_bitField0_ & 0x00000080) == 0x00000080)) { 3030 to_bitField0_ |= 0x00000020; 3031 } 3032 result.storeLimit_ = storeLimit_; 3033 if (((from_bitField0_ & 0x00000100) == 0x00000100)) { 3034 to_bitField0_ |= 0x00000040; 3035 } 3036 result.storeOffset_ = storeOffset_; 3037 if (((from_bitField0_ & 0x00000200) == 0x00000200)) { 3038 to_bitField0_ |= 0x00000080; 3039 } 3040 result.existenceOnly_ = existenceOnly_; 3041 if (((from_bitField0_ & 0x00000400) == 0x00000400)) { 3042 to_bitField0_ |= 0x00000100; 3043 } 3044 result.closestRowBefore_ = closestRowBefore_; 3045 if (((from_bitField0_ & 0x00000800) == 0x00000800)) { 3046 to_bitField0_ |= 0x00000200; 3047 } 3048 result.consistency_ = consistency_; 3049 if (cfTimeRangeBuilder_ == null) { 3050 if (((bitField0_ & 0x00001000) == 0x00001000)) { 3051 cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_); 3052 bitField0_ = (bitField0_ & ~0x00001000); 3053 } 3054 result.cfTimeRange_ = cfTimeRange_; 3055 } else { 3056 result.cfTimeRange_ = cfTimeRangeBuilder_.build(); 3057 } 3058 result.bitField0_ = to_bitField0_; 3059 onBuilt(); 3060 return result; 3061 } 3062 mergeFrom(com.google.protobuf.Message other)3063 public Builder mergeFrom(com.google.protobuf.Message other) { 3064 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) { 3065 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get)other); 3066 } else { 3067 super.mergeFrom(other); 3068 return this; 3069 } 3070 } 
3071 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other)3072 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get other) { 3073 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) return this; 3074 if (other.hasRow()) { 3075 setRow(other.getRow()); 3076 } 3077 if (columnBuilder_ == null) { 3078 if (!other.column_.isEmpty()) { 3079 if (column_.isEmpty()) { 3080 column_ = other.column_; 3081 bitField0_ = (bitField0_ & ~0x00000002); 3082 } else { 3083 ensureColumnIsMutable(); 3084 column_.addAll(other.column_); 3085 } 3086 onChanged(); 3087 } 3088 } else { 3089 if (!other.column_.isEmpty()) { 3090 if (columnBuilder_.isEmpty()) { 3091 columnBuilder_.dispose(); 3092 columnBuilder_ = null; 3093 column_ = other.column_; 3094 bitField0_ = (bitField0_ & ~0x00000002); 3095 columnBuilder_ = 3096 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 3097 getColumnFieldBuilder() : null; 3098 } else { 3099 columnBuilder_.addAllMessages(other.column_); 3100 } 3101 } 3102 } 3103 if (attributeBuilder_ == null) { 3104 if (!other.attribute_.isEmpty()) { 3105 if (attribute_.isEmpty()) { 3106 attribute_ = other.attribute_; 3107 bitField0_ = (bitField0_ & ~0x00000004); 3108 } else { 3109 ensureAttributeIsMutable(); 3110 attribute_.addAll(other.attribute_); 3111 } 3112 onChanged(); 3113 } 3114 } else { 3115 if (!other.attribute_.isEmpty()) { 3116 if (attributeBuilder_.isEmpty()) { 3117 attributeBuilder_.dispose(); 3118 attributeBuilder_ = null; 3119 attribute_ = other.attribute_; 3120 bitField0_ = (bitField0_ & ~0x00000004); 3121 attributeBuilder_ = 3122 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
3123 getAttributeFieldBuilder() : null; 3124 } else { 3125 attributeBuilder_.addAllMessages(other.attribute_); 3126 } 3127 } 3128 } 3129 if (other.hasFilter()) { 3130 mergeFilter(other.getFilter()); 3131 } 3132 if (other.hasTimeRange()) { 3133 mergeTimeRange(other.getTimeRange()); 3134 } 3135 if (other.hasMaxVersions()) { 3136 setMaxVersions(other.getMaxVersions()); 3137 } 3138 if (other.hasCacheBlocks()) { 3139 setCacheBlocks(other.getCacheBlocks()); 3140 } 3141 if (other.hasStoreLimit()) { 3142 setStoreLimit(other.getStoreLimit()); 3143 } 3144 if (other.hasStoreOffset()) { 3145 setStoreOffset(other.getStoreOffset()); 3146 } 3147 if (other.hasExistenceOnly()) { 3148 setExistenceOnly(other.getExistenceOnly()); 3149 } 3150 if (other.hasClosestRowBefore()) { 3151 setClosestRowBefore(other.getClosestRowBefore()); 3152 } 3153 if (other.hasConsistency()) { 3154 setConsistency(other.getConsistency()); 3155 } 3156 if (cfTimeRangeBuilder_ == null) { 3157 if (!other.cfTimeRange_.isEmpty()) { 3158 if (cfTimeRange_.isEmpty()) { 3159 cfTimeRange_ = other.cfTimeRange_; 3160 bitField0_ = (bitField0_ & ~0x00001000); 3161 } else { 3162 ensureCfTimeRangeIsMutable(); 3163 cfTimeRange_.addAll(other.cfTimeRange_); 3164 } 3165 onChanged(); 3166 } 3167 } else { 3168 if (!other.cfTimeRange_.isEmpty()) { 3169 if (cfTimeRangeBuilder_.isEmpty()) { 3170 cfTimeRangeBuilder_.dispose(); 3171 cfTimeRangeBuilder_ = null; 3172 cfTimeRange_ = other.cfTimeRange_; 3173 bitField0_ = (bitField0_ & ~0x00001000); 3174 cfTimeRangeBuilder_ = 3175 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
3176 getCfTimeRangeFieldBuilder() : null; 3177 } else { 3178 cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_); 3179 } 3180 } 3181 } 3182 this.mergeUnknownFields(other.getUnknownFields()); 3183 return this; 3184 } 3185 isInitialized()3186 public final boolean isInitialized() { 3187 if (!hasRow()) { 3188 3189 return false; 3190 } 3191 for (int i = 0; i < getColumnCount(); i++) { 3192 if (!getColumn(i).isInitialized()) { 3193 3194 return false; 3195 } 3196 } 3197 for (int i = 0; i < getAttributeCount(); i++) { 3198 if (!getAttribute(i).isInitialized()) { 3199 3200 return false; 3201 } 3202 } 3203 if (hasFilter()) { 3204 if (!getFilter().isInitialized()) { 3205 3206 return false; 3207 } 3208 } 3209 for (int i = 0; i < getCfTimeRangeCount(); i++) { 3210 if (!getCfTimeRange(i).isInitialized()) { 3211 3212 return false; 3213 } 3214 } 3215 return true; 3216 } 3217 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3218 public Builder mergeFrom( 3219 com.google.protobuf.CodedInputStream input, 3220 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3221 throws java.io.IOException { 3222 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get parsedMessage = null; 3223 try { 3224 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 3225 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3226 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get) e.getUnfinishedMessage(); 3227 throw e; 3228 } finally { 3229 if (parsedMessage != null) { 3230 mergeFrom(parsedMessage); 3231 } 3232 } 3233 return this; 3234 } 3235 private int bitField0_; 3236 3237 // required bytes row = 1; 3238 private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; 3239 /** 3240 * <code>required bytes row = 1;</code> 3241 */ hasRow()3242 public boolean hasRow() { 3243 return ((bitField0_ & 0x00000001) == 0x00000001); 3244 } 3245 /** 3246 * 
<code>required bytes row = 1;</code> 3247 */ getRow()3248 public com.google.protobuf.ByteString getRow() { 3249 return row_; 3250 } 3251 /** 3252 * <code>required bytes row = 1;</code> 3253 */ setRow(com.google.protobuf.ByteString value)3254 public Builder setRow(com.google.protobuf.ByteString value) { 3255 if (value == null) { 3256 throw new NullPointerException(); 3257 } 3258 bitField0_ |= 0x00000001; 3259 row_ = value; 3260 onChanged(); 3261 return this; 3262 } 3263 /** 3264 * <code>required bytes row = 1;</code> 3265 */ clearRow()3266 public Builder clearRow() { 3267 bitField0_ = (bitField0_ & ~0x00000001); 3268 row_ = getDefaultInstance().getRow(); 3269 onChanged(); 3270 return this; 3271 } 3272 3273 // repeated .Column column = 2; 3274 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ = 3275 java.util.Collections.emptyList(); ensureColumnIsMutable()3276 private void ensureColumnIsMutable() { 3277 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 3278 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_); 3279 bitField0_ |= 0x00000002; 3280 } 3281 } 3282 3283 private com.google.protobuf.RepeatedFieldBuilder< 3284 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; 3285 3286 /** 3287 * <code>repeated .Column column = 2;</code> 3288 */ getColumnList()3289 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() { 3290 if (columnBuilder_ == null) { 3291 return java.util.Collections.unmodifiableList(column_); 3292 } else { 3293 return columnBuilder_.getMessageList(); 3294 } 3295 } 3296 /** 3297 * <code>repeated .Column column = 2;</code> 3298 */ getColumnCount()3299 public int getColumnCount() { 3300 if (columnBuilder_ == null) { 3301 return 
column_.size(); 3302 } else { 3303 return columnBuilder_.getCount(); 3304 } 3305 } 3306 /** 3307 * <code>repeated .Column column = 2;</code> 3308 */ getColumn(int index)3309 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { 3310 if (columnBuilder_ == null) { 3311 return column_.get(index); 3312 } else { 3313 return columnBuilder_.getMessage(index); 3314 } 3315 } 3316 /** 3317 * <code>repeated .Column column = 2;</code> 3318 */ setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value)3319 public Builder setColumn( 3320 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { 3321 if (columnBuilder_ == null) { 3322 if (value == null) { 3323 throw new NullPointerException(); 3324 } 3325 ensureColumnIsMutable(); 3326 column_.set(index, value); 3327 onChanged(); 3328 } else { 3329 columnBuilder_.setMessage(index, value); 3330 } 3331 return this; 3332 } 3333 /** 3334 * <code>repeated .Column column = 2;</code> 3335 */ setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue)3336 public Builder setColumn( 3337 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { 3338 if (columnBuilder_ == null) { 3339 ensureColumnIsMutable(); 3340 column_.set(index, builderForValue.build()); 3341 onChanged(); 3342 } else { 3343 columnBuilder_.setMessage(index, builderForValue.build()); 3344 } 3345 return this; 3346 } 3347 /** 3348 * <code>repeated .Column column = 2;</code> 3349 */ addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value)3350 public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { 3351 if (columnBuilder_ == null) { 3352 if (value == null) { 3353 throw new NullPointerException(); 3354 } 3355 ensureColumnIsMutable(); 3356 column_.add(value); 3357 onChanged(); 3358 } else { 3359 columnBuilder_.addMessage(value); 
3360 } 3361 return this; 3362 } 3363 /** 3364 * <code>repeated .Column column = 2;</code> 3365 */ addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value)3366 public Builder addColumn( 3367 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { 3368 if (columnBuilder_ == null) { 3369 if (value == null) { 3370 throw new NullPointerException(); 3371 } 3372 ensureColumnIsMutable(); 3373 column_.add(index, value); 3374 onChanged(); 3375 } else { 3376 columnBuilder_.addMessage(index, value); 3377 } 3378 return this; 3379 } 3380 /** 3381 * <code>repeated .Column column = 2;</code> 3382 */ addColumn( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue)3383 public Builder addColumn( 3384 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { 3385 if (columnBuilder_ == null) { 3386 ensureColumnIsMutable(); 3387 column_.add(builderForValue.build()); 3388 onChanged(); 3389 } else { 3390 columnBuilder_.addMessage(builderForValue.build()); 3391 } 3392 return this; 3393 } 3394 /** 3395 * <code>repeated .Column column = 2;</code> 3396 */ addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue)3397 public Builder addColumn( 3398 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { 3399 if (columnBuilder_ == null) { 3400 ensureColumnIsMutable(); 3401 column_.add(index, builderForValue.build()); 3402 onChanged(); 3403 } else { 3404 columnBuilder_.addMessage(index, builderForValue.build()); 3405 } 3406 return this; 3407 } 3408 /** 3409 * <code>repeated .Column column = 2;</code> 3410 */ addAllColumn( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values)3411 public Builder addAllColumn( 3412 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) { 3413 if (columnBuilder_ == null) { 3414 ensureColumnIsMutable(); 3415 super.addAll(values, column_); 3416 onChanged(); 3417 } else { 3418 columnBuilder_.addAllMessages(values); 3419 } 3420 return this; 3421 } 3422 /** 3423 * <code>repeated .Column column = 2;</code> 3424 */ clearColumn()3425 public Builder clearColumn() { 3426 if (columnBuilder_ == null) { 3427 column_ = java.util.Collections.emptyList(); 3428 bitField0_ = (bitField0_ & ~0x00000002); 3429 onChanged(); 3430 } else { 3431 columnBuilder_.clear(); 3432 } 3433 return this; 3434 } 3435 /** 3436 * <code>repeated .Column column = 2;</code> 3437 */ removeColumn(int index)3438 public Builder removeColumn(int index) { 3439 if (columnBuilder_ == null) { 3440 ensureColumnIsMutable(); 3441 column_.remove(index); 3442 onChanged(); 3443 } else { 3444 columnBuilder_.remove(index); 3445 } 3446 return this; 3447 } 3448 /** 3449 * <code>repeated .Column column = 2;</code> 3450 */ getColumnBuilder( int index)3451 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( 3452 int index) { 3453 return getColumnFieldBuilder().getBuilder(index); 3454 } 3455 /** 3456 * <code>repeated .Column column = 2;</code> 3457 */ getColumnOrBuilder( int index)3458 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( 3459 int index) { 3460 if (columnBuilder_ == null) { 3461 return column_.get(index); } else { 3462 return columnBuilder_.getMessageOrBuilder(index); 3463 } 3464 } 3465 /** 3466 * <code>repeated .Column column = 2;</code> 3467 */ 3468 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList()3469 getColumnOrBuilderList() { 3470 if (columnBuilder_ != null) { 3471 return columnBuilder_.getMessageOrBuilderList(); 3472 } else { 3473 return java.util.Collections.unmodifiableList(column_); 3474 } 3475 } 3476 /** 3477 * <code>repeated .Column column = 2;</code> 3478 */ addColumnBuilder()3479 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { 3480 return getColumnFieldBuilder().addBuilder( 3481 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); 3482 } 3483 /** 3484 * <code>repeated .Column column = 2;</code> 3485 */ addColumnBuilder( int index)3486 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( 3487 int index) { 3488 return getColumnFieldBuilder().addBuilder( 3489 index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); 3490 } 3491 /** 3492 * <code>repeated .Column column = 2;</code> 3493 */ 3494 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder> getColumnBuilderList()3495 getColumnBuilderList() { 3496 return getColumnFieldBuilder().getBuilderList(); 3497 } 3498 private com.google.protobuf.RepeatedFieldBuilder< 3499 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder()3500 getColumnFieldBuilder() { 3501 if (columnBuilder_ == null) { 3502 columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 3503 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( 3504 column_, 3505 ((bitField0_ & 0x00000002) == 0x00000002), 
// NOTE(review): protoc-generated Java (file header: "Generated by the protocol buffer compiler. DO NOT EDIT! source: Client.proto").
// This chunk was rendered through a source-cross-referencing tool that collapsed newlines and baked the original
// file line numbers (3506, 3507, ...) and duplicate method signatures into the text. Do not hand-edit the code;
// regenerate from Client.proto. The comments added here only annotate what each collapsed run contains.
// Below: tail of getColumnFieldBuilder() (lazy RepeatedFieldBuilder for the `column` field), then the
// `repeated .NameBytesPair attribute = 3` plumbing of Get.Builder: backing list attribute_,
// ensureAttributeIsMutable() (copies the list on first write, guarded by bitField0_ flag 0x00000004),
// and read accessors getAttributeList/getAttributeCount/getAttribute, which delegate to attributeBuilder_
// once that lazy builder exists and otherwise read the local list.
3506 getParentForChildren(), 3507 isClean()); 3508 column_ = null; 3509 } 3510 return columnBuilder_; 3511 } 3512 3513 // repeated .NameBytesPair attribute = 3; 3514 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ = 3515 java.util.Collections.emptyList(); ensureAttributeIsMutable()3516 private void ensureAttributeIsMutable() { 3517 if (!((bitField0_ & 0x00000004) == 0x00000004)) { 3518 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_); 3519 bitField0_ |= 0x00000004; 3520 } 3521 } 3522 3523 private com.google.protobuf.RepeatedFieldBuilder< 3524 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; 3525 3526 /** 3527 * <code>repeated .NameBytesPair attribute = 3;</code> 3528 */ getAttributeList()3529 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { 3530 if (attributeBuilder_ == null) { 3531 return java.util.Collections.unmodifiableList(attribute_); 3532 } else { 3533 return attributeBuilder_.getMessageList(); 3534 } 3535 } 3536 /** 3537 * <code>repeated .NameBytesPair attribute = 3;</code> 3538 */ getAttributeCount()3539 public int getAttributeCount() { 3540 if (attributeBuilder_ == null) { 3541 return attribute_.size(); 3542 } else { 3543 return attributeBuilder_.getCount(); 3544 } 3545 } 3546 /** 3547 * <code>repeated .NameBytesPair attribute = 3;</code> 3548 */ getAttribute(int index)3549 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { 3550 if (attributeBuilder_ == null) { 3551 return attribute_.get(index); 3552 } else { 3553 return attributeBuilder_.getMessage(index); 3554 } 3555 } 3556 /** 3557 * <code>repeated .NameBytesPair attribute 
// Below: mutators for `attribute` (setAttribute message/builder overloads, addAttribute with and without index);
// each routes to attributeBuilder_ when the lazy builder exists, otherwise mutates the local list via
// ensureAttributeIsMutable() and notifies the parent through onChanged(); null message values throw
// NullPointerException.
= 3;</code> 3558 */ setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)3559 public Builder setAttribute( 3560 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 3561 if (attributeBuilder_ == null) { 3562 if (value == null) { 3563 throw new NullPointerException(); 3564 } 3565 ensureAttributeIsMutable(); 3566 attribute_.set(index, value); 3567 onChanged(); 3568 } else { 3569 attributeBuilder_.setMessage(index, value); 3570 } 3571 return this; 3572 } 3573 /** 3574 * <code>repeated .NameBytesPair attribute = 3;</code> 3575 */ setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)3576 public Builder setAttribute( 3577 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 3578 if (attributeBuilder_ == null) { 3579 ensureAttributeIsMutable(); 3580 attribute_.set(index, builderForValue.build()); 3581 onChanged(); 3582 } else { 3583 attributeBuilder_.setMessage(index, builderForValue.build()); 3584 } 3585 return this; 3586 } 3587 /** 3588 * <code>repeated .NameBytesPair attribute = 3;</code> 3589 */ addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)3590 public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 3591 if (attributeBuilder_ == null) { 3592 if (value == null) { 3593 throw new NullPointerException(); 3594 } 3595 ensureAttributeIsMutable(); 3596 attribute_.add(value); 3597 onChanged(); 3598 } else { 3599 attributeBuilder_.addMessage(value); 3600 } 3601 return this; 3602 } 3603 /** 3604 * <code>repeated .NameBytesPair attribute = 3;</code> 3605 */ addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)3606 public Builder addAttribute( 3607 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 3608 if 
(attributeBuilder_ == null) { 3609 if (value == null) { 3610 throw new NullPointerException(); 3611 } 3612 ensureAttributeIsMutable(); 3613 attribute_.add(index, value); 3614 onChanged(); 3615 } else { 3616 attributeBuilder_.addMessage(index, value); 3617 } 3618 return this; 3619 } 3620 /** 3621 * <code>repeated .NameBytesPair attribute = 3;</code> 3622 */ addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)3623 public Builder addAttribute( 3624 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 3625 if (attributeBuilder_ == null) { 3626 ensureAttributeIsMutable(); 3627 attribute_.add(builderForValue.build()); 3628 onChanged(); 3629 } else { 3630 attributeBuilder_.addMessage(builderForValue.build()); 3631 } 3632 return this; 3633 } 3634 /** 3635 * <code>repeated .NameBytesPair attribute = 3;</code> 3636 */ addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)3637 public Builder addAttribute( 3638 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 3639 if (attributeBuilder_ == null) { 3640 ensureAttributeIsMutable(); 3641 attribute_.add(index, builderForValue.build()); 3642 onChanged(); 3643 } else { 3644 attributeBuilder_.addMessage(index, builderForValue.build()); 3645 } 3646 return this; 3647 } 3648 /** 3649 * <code>repeated .NameBytesPair attribute = 3;</code> 3650 */ addAllAttribute( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values)3651 public Builder addAllAttribute( 3652 java.lang.Iterable<? 
// Below: addAllAttribute (bulk add via the inherited GeneratedMessage.Builder addAll helper),
// clearAttribute/removeAttribute, and the builder-view accessors getAttributeBuilder,
// getAttributeOrBuilder, getAttributeOrBuilderList for `attribute`.
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) { 3653 if (attributeBuilder_ == null) { 3654 ensureAttributeIsMutable(); 3655 super.addAll(values, attribute_); 3656 onChanged(); 3657 } else { 3658 attributeBuilder_.addAllMessages(values); 3659 } 3660 return this; 3661 } 3662 /** 3663 * <code>repeated .NameBytesPair attribute = 3;</code> 3664 */ clearAttribute()3665 public Builder clearAttribute() { 3666 if (attributeBuilder_ == null) { 3667 attribute_ = java.util.Collections.emptyList(); 3668 bitField0_ = (bitField0_ & ~0x00000004); 3669 onChanged(); 3670 } else { 3671 attributeBuilder_.clear(); 3672 } 3673 return this; 3674 } 3675 /** 3676 * <code>repeated .NameBytesPair attribute = 3;</code> 3677 */ removeAttribute(int index)3678 public Builder removeAttribute(int index) { 3679 if (attributeBuilder_ == null) { 3680 ensureAttributeIsMutable(); 3681 attribute_.remove(index); 3682 onChanged(); 3683 } else { 3684 attributeBuilder_.remove(index); 3685 } 3686 return this; 3687 } 3688 /** 3689 * <code>repeated .NameBytesPair attribute = 3;</code> 3690 */ getAttributeBuilder( int index)3691 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( 3692 int index) { 3693 return getAttributeFieldBuilder().getBuilder(index); 3694 } 3695 /** 3696 * <code>repeated .NameBytesPair attribute = 3;</code> 3697 */ getAttributeOrBuilder( int index)3698 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 3699 int index) { 3700 if (attributeBuilder_ == null) { 3701 return attribute_.get(index); } else { 3702 return attributeBuilder_.getMessageOrBuilder(index); 3703 } 3704 } 3705 /** 3706 * <code>repeated .NameBytesPair attribute = 3;</code> 3707 */ 3708 public java.util.List<? 
// Below: getAttributeOrBuilderList, addAttributeBuilder (with and without index, seeded with the
// NameBytesPair default instance), getAttributeBuilderList, and the start of getAttributeFieldBuilder(),
// which lazily constructs the RepeatedFieldBuilder for `attribute`.
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()3709 getAttributeOrBuilderList() { 3710 if (attributeBuilder_ != null) { 3711 return attributeBuilder_.getMessageOrBuilderList(); 3712 } else { 3713 return java.util.Collections.unmodifiableList(attribute_); 3714 } 3715 } 3716 /** 3717 * <code>repeated .NameBytesPair attribute = 3;</code> 3718 */ addAttributeBuilder()3719 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { 3720 return getAttributeFieldBuilder().addBuilder( 3721 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); 3722 } 3723 /** 3724 * <code>repeated .NameBytesPair attribute = 3;</code> 3725 */ addAttributeBuilder( int index)3726 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( 3727 int index) { 3728 return getAttributeFieldBuilder().addBuilder( 3729 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); 3730 } 3731 /** 3732 * <code>repeated .NameBytesPair attribute = 3;</code> 3733 */ 3734 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> getAttributeBuilderList()3735 getAttributeBuilderList() { 3736 return getAttributeFieldBuilder().getBuilderList(); 3737 } 3738 private com.google.protobuf.RepeatedFieldBuilder< 3739 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder()3740 getAttributeFieldBuilder() { 3741 if (attributeBuilder_ == null) { 3742 attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 3743 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
// Below: end of getAttributeFieldBuilder() — once the builder is created, attribute_ is nulled out because
// ownership of the list transfers to the RepeatedFieldBuilder. Then the `optional .Filter filter = 4` field
// (presence flag 0x00000800? no — flag 0x00000008): filter_ defaulted to Filter.getDefaultInstance(),
// hasFilter/getFilter, and the setFilter(message) / setFilter(builder) overloads.
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( 3744 attribute_, 3745 ((bitField0_ & 0x00000004) == 0x00000004), 3746 getParentForChildren(), 3747 isClean()); 3748 attribute_ = null; 3749 } 3750 return attributeBuilder_; 3751 } 3752 3753 // optional .Filter filter = 4; 3754 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 3755 private com.google.protobuf.SingleFieldBuilder< 3756 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; 3757 /** 3758 * <code>optional .Filter filter = 4;</code> 3759 */ hasFilter()3760 public boolean hasFilter() { 3761 return ((bitField0_ & 0x00000008) == 0x00000008); 3762 } 3763 /** 3764 * <code>optional .Filter filter = 4;</code> 3765 */ getFilter()3766 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { 3767 if (filterBuilder_ == null) { 3768 return filter_; 3769 } else { 3770 return filterBuilder_.getMessage(); 3771 } 3772 } 3773 /** 3774 * <code>optional .Filter filter = 4;</code> 3775 */ setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)3776 public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 3777 if (filterBuilder_ == null) { 3778 if (value == null) { 3779 throw new NullPointerException(); 3780 } 3781 filter_ = value; 3782 onChanged(); 3783 } else { 3784 filterBuilder_.setMessage(value); 3785 } 3786 bitField0_ |= 0x00000008; 3787 return this; 3788 } 3789 /** 3790 * <code>optional .Filter filter = 4;</code> 3791 */ setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)3792 public Builder 
// NOTE(review): protoc-generated Java ("DO NOT EDIT", regenerate from Client.proto) rendered through a
// cross-referencing tool that collapsed newlines and embedded original line numbers and duplicate method
// signatures into the text. Comments added here only annotate the collapsed runs; the code is untouched.
// Below: rest of setFilter(builder); mergeFilter (when a non-default filter_ is already present, merges the
// incoming message into it via newBuilder(filter_).mergeFrom(value).buildPartial(), otherwise replaces it);
// clearFilter (resets to the Filter default instance and clears flag 0x00000008); getFilterBuilder (forces the
// lazy SingleFieldBuilder and marks the field present); start of getFilterOrBuilder.
setFilter( 3793 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { 3794 if (filterBuilder_ == null) { 3795 filter_ = builderForValue.build(); 3796 onChanged(); 3797 } else { 3798 filterBuilder_.setMessage(builderForValue.build()); 3799 } 3800 bitField0_ |= 0x00000008; 3801 return this; 3802 } 3803 /** 3804 * <code>optional .Filter filter = 4;</code> 3805 */ mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)3806 public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 3807 if (filterBuilder_ == null) { 3808 if (((bitField0_ & 0x00000008) == 0x00000008) && 3809 filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { 3810 filter_ = 3811 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); 3812 } else { 3813 filter_ = value; 3814 } 3815 onChanged(); 3816 } else { 3817 filterBuilder_.mergeFrom(value); 3818 } 3819 bitField0_ |= 0x00000008; 3820 return this; 3821 } 3822 /** 3823 * <code>optional .Filter filter = 4;</code> 3824 */ clearFilter()3825 public Builder clearFilter() { 3826 if (filterBuilder_ == null) { 3827 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 3828 onChanged(); 3829 } else { 3830 filterBuilder_.clear(); 3831 } 3832 bitField0_ = (bitField0_ & ~0x00000008); 3833 return this; 3834 } 3835 /** 3836 * <code>optional .Filter filter = 4;</code> 3837 */ getFilterBuilder()3838 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() { 3839 bitField0_ |= 0x00000008; 3840 onChanged(); 3841 return getFilterFieldBuilder().getBuilder(); 3842 } 3843 /** 3844 * <code>optional .Filter filter = 4;</code> 3845 */ getFilterOrBuilder()3846 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { 3847 if (filterBuilder_ != null) { 
// Below: end of getFilterOrBuilder and getFilterFieldBuilder() (lazily creates the SingleFieldBuilder and
// nulls filter_ once ownership transfers). Then the `optional .TimeRange time_range = 5` field (presence flag
// 0x00000010): timeRange_ defaulted to TimeRange.getDefaultInstance(), hasTimeRange/getTimeRange.
3848 return filterBuilder_.getMessageOrBuilder(); 3849 } else { 3850 return filter_; 3851 } 3852 } 3853 /** 3854 * <code>optional .Filter filter = 4;</code> 3855 */ 3856 private com.google.protobuf.SingleFieldBuilder< 3857 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder()3858 getFilterFieldBuilder() { 3859 if (filterBuilder_ == null) { 3860 filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< 3861 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( 3862 filter_, 3863 getParentForChildren(), 3864 isClean()); 3865 filter_ = null; 3866 } 3867 return filterBuilder_; 3868 } 3869 3870 // optional .TimeRange time_range = 5; 3871 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 3872 private com.google.protobuf.SingleFieldBuilder< 3873 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; 3874 /** 3875 * <code>optional .TimeRange time_range = 5;</code> 3876 */ hasTimeRange()3877 public boolean hasTimeRange() { 3878 return ((bitField0_ & 0x00000010) == 0x00000010); 3879 } 3880 /** 3881 * <code>optional .TimeRange time_range = 5;</code> 3882 */ getTimeRange()3883 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 3884 if (timeRangeBuilder_ == null) { 3885 return timeRange_; 3886 } else { 3887 return timeRangeBuilder_.getMessage(); 3888 } 3889 } 3890 /** 3891 * <code>optional .TimeRange time_range = 
// Below: setTimeRange(message)/setTimeRange(builder), mergeTimeRange (same merge-or-replace pattern as
// mergeFilter), and the start of clearTimeRange.
5;</code> 3892 */ setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)3893 public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 3894 if (timeRangeBuilder_ == null) { 3895 if (value == null) { 3896 throw new NullPointerException(); 3897 } 3898 timeRange_ = value; 3899 onChanged(); 3900 } else { 3901 timeRangeBuilder_.setMessage(value); 3902 } 3903 bitField0_ |= 0x00000010; 3904 return this; 3905 } 3906 /** 3907 * <code>optional .TimeRange time_range = 5;</code> 3908 */ setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue)3909 public Builder setTimeRange( 3910 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { 3911 if (timeRangeBuilder_ == null) { 3912 timeRange_ = builderForValue.build(); 3913 onChanged(); 3914 } else { 3915 timeRangeBuilder_.setMessage(builderForValue.build()); 3916 } 3917 bitField0_ |= 0x00000010; 3918 return this; 3919 } 3920 /** 3921 * <code>optional .TimeRange time_range = 5;</code> 3922 */ mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)3923 public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 3924 if (timeRangeBuilder_ == null) { 3925 if (((bitField0_ & 0x00000010) == 0x00000010) && 3926 timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { 3927 timeRange_ = 3928 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); 3929 } else { 3930 timeRange_ = value; 3931 } 3932 onChanged(); 3933 } else { 3934 timeRangeBuilder_.mergeFrom(value); 3935 } 3936 bitField0_ |= 0x00000010; 3937 return this; 3938 } 3939 /** 3940 * <code>optional .TimeRange time_range = 5;</code> 3941 */ clearTimeRange()3942 public Builder clearTimeRange() { 3943 if (timeRangeBuilder_ == null) { 3944 timeRange_ 
// Below: end of clearTimeRange, getTimeRangeBuilder/getTimeRangeOrBuilder, getTimeRangeFieldBuilder() (lazy
// SingleFieldBuilder, nulls timeRange_ after handoff). Then `optional uint32 max_versions = 6 [default = 1]`
// begins: maxVersions_ initialized to 1, hasMaxVersions (presence flag 0x00000020).
= org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 3945 onChanged(); 3946 } else { 3947 timeRangeBuilder_.clear(); 3948 } 3949 bitField0_ = (bitField0_ & ~0x00000010); 3950 return this; 3951 } 3952 /** 3953 * <code>optional .TimeRange time_range = 5;</code> 3954 */ getTimeRangeBuilder()3955 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { 3956 bitField0_ |= 0x00000010; 3957 onChanged(); 3958 return getTimeRangeFieldBuilder().getBuilder(); 3959 } 3960 /** 3961 * <code>optional .TimeRange time_range = 5;</code> 3962 */ getTimeRangeOrBuilder()3963 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 3964 if (timeRangeBuilder_ != null) { 3965 return timeRangeBuilder_.getMessageOrBuilder(); 3966 } else { 3967 return timeRange_; 3968 } 3969 } 3970 /** 3971 * <code>optional .TimeRange time_range = 5;</code> 3972 */ 3973 private com.google.protobuf.SingleFieldBuilder< 3974 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder()3975 getTimeRangeFieldBuilder() { 3976 if (timeRangeBuilder_ == null) { 3977 timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< 3978 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( 3979 timeRange_, 3980 getParentForChildren(), 3981 isClean()); 3982 timeRange_ = null; 3983 } 3984 return timeRangeBuilder_; 3985 } 3986 3987 // optional uint32 max_versions = 6 [default = 1]; 3988 private int maxVersions_ = 1; 3989 /** 3990 * <code>optional uint32 max_versions = 6 [default = 1];</code> 3991 */ hasMaxVersions()3992 public boolean hasMaxVersions() { 
// NOTE(review): protoc-generated Java ("DO NOT EDIT", regenerate from Client.proto) rendered through a
// cross-referencing tool that collapsed newlines and embedded original line numbers and duplicate method
// signatures into the text. Comments added here only annotate the collapsed runs; the code is untouched.
// Below: scalar-field accessors of Get.Builder — max_versions (field 6, flag 0x00000020, clear resets to the
// proto default 1); cache_blocks (field 7, flag 0x00000040, clear resets to default true); start of
// store_limit (field 8, flag 0x00000080). Each setter sets the presence bit, stores the value, and calls
// onChanged().
3993 return ((bitField0_ & 0x00000020) == 0x00000020); 3994 } 3995 /** 3996 * <code>optional uint32 max_versions = 6 [default = 1];</code> 3997 */ getMaxVersions()3998 public int getMaxVersions() { 3999 return maxVersions_; 4000 } 4001 /** 4002 * <code>optional uint32 max_versions = 6 [default = 1];</code> 4003 */ setMaxVersions(int value)4004 public Builder setMaxVersions(int value) { 4005 bitField0_ |= 0x00000020; 4006 maxVersions_ = value; 4007 onChanged(); 4008 return this; 4009 } 4010 /** 4011 * <code>optional uint32 max_versions = 6 [default = 1];</code> 4012 */ clearMaxVersions()4013 public Builder clearMaxVersions() { 4014 bitField0_ = (bitField0_ & ~0x00000020); 4015 maxVersions_ = 1; 4016 onChanged(); 4017 return this; 4018 } 4019 4020 // optional bool cache_blocks = 7 [default = true]; 4021 private boolean cacheBlocks_ = true; 4022 /** 4023 * <code>optional bool cache_blocks = 7 [default = true];</code> 4024 */ hasCacheBlocks()4025 public boolean hasCacheBlocks() { 4026 return ((bitField0_ & 0x00000040) == 0x00000040); 4027 } 4028 /** 4029 * <code>optional bool cache_blocks = 7 [default = true];</code> 4030 */ getCacheBlocks()4031 public boolean getCacheBlocks() { 4032 return cacheBlocks_; 4033 } 4034 /** 4035 * <code>optional bool cache_blocks = 7 [default = true];</code> 4036 */ setCacheBlocks(boolean value)4037 public Builder setCacheBlocks(boolean value) { 4038 bitField0_ |= 0x00000040; 4039 cacheBlocks_ = value; 4040 onChanged(); 4041 return this; 4042 } 4043 /** 4044 * <code>optional bool cache_blocks = 7 [default = true];</code> 4045 */ clearCacheBlocks()4046 public Builder clearCacheBlocks() { 4047 bitField0_ = (bitField0_ & ~0x00000040); 4048 cacheBlocks_ = true; 4049 onChanged(); 4050 return this; 4051 } 4052 4053 // optional uint32 store_limit = 8; 4054 private int storeLimit_ ; 4055 /** 4056 * <code>optional uint32 store_limit = 8;</code> 4057 */ hasStoreLimit()4058 public boolean hasStoreLimit() { 4059 return ((bitField0_ & 0x00000080) == 
// Below: rest of store_limit (clear resets to 0); store_offset (field 9, flag 0x00000100, clear resets to 0);
// start of existence_only (field 10, flag 0x00000200, default false).
0x00000080); 4060 } 4061 /** 4062 * <code>optional uint32 store_limit = 8;</code> 4063 */ getStoreLimit()4064 public int getStoreLimit() { 4065 return storeLimit_; 4066 } 4067 /** 4068 * <code>optional uint32 store_limit = 8;</code> 4069 */ setStoreLimit(int value)4070 public Builder setStoreLimit(int value) { 4071 bitField0_ |= 0x00000080; 4072 storeLimit_ = value; 4073 onChanged(); 4074 return this; 4075 } 4076 /** 4077 * <code>optional uint32 store_limit = 8;</code> 4078 */ clearStoreLimit()4079 public Builder clearStoreLimit() { 4080 bitField0_ = (bitField0_ & ~0x00000080); 4081 storeLimit_ = 0; 4082 onChanged(); 4083 return this; 4084 } 4085 4086 // optional uint32 store_offset = 9; 4087 private int storeOffset_ ; 4088 /** 4089 * <code>optional uint32 store_offset = 9;</code> 4090 */ hasStoreOffset()4091 public boolean hasStoreOffset() { 4092 return ((bitField0_ & 0x00000100) == 0x00000100); 4093 } 4094 /** 4095 * <code>optional uint32 store_offset = 9;</code> 4096 */ getStoreOffset()4097 public int getStoreOffset() { 4098 return storeOffset_; 4099 } 4100 /** 4101 * <code>optional uint32 store_offset = 9;</code> 4102 */ setStoreOffset(int value)4103 public Builder setStoreOffset(int value) { 4104 bitField0_ |= 0x00000100; 4105 storeOffset_ = value; 4106 onChanged(); 4107 return this; 4108 } 4109 /** 4110 * <code>optional uint32 store_offset = 9;</code> 4111 */ clearStoreOffset()4112 public Builder clearStoreOffset() { 4113 bitField0_ = (bitField0_ & ~0x00000100); 4114 storeOffset_ = 0; 4115 onChanged(); 4116 return this; 4117 } 4118 4119 // optional bool existence_only = 10 [default = false]; 4120 private boolean existenceOnly_ ; 4121 /** 4122 * <code>optional bool existence_only = 10 [default = false];</code> 4123 * 4124 * <pre> 4125 * The result isn't asked for, just check for 4126 * the existence. 
// Below: rest of existence_only (has/get/set/clear); start of closest_row_before (field 11, flag 0x00000400,
// default false).
4127 * </pre> 4128 */ hasExistenceOnly()4129 public boolean hasExistenceOnly() { 4130 return ((bitField0_ & 0x00000200) == 0x00000200); 4131 } 4132 /** 4133 * <code>optional bool existence_only = 10 [default = false];</code> 4134 * 4135 * <pre> 4136 * The result isn't asked for, just check for 4137 * the existence. 4138 * </pre> 4139 */ getExistenceOnly()4140 public boolean getExistenceOnly() { 4141 return existenceOnly_; 4142 } 4143 /** 4144 * <code>optional bool existence_only = 10 [default = false];</code> 4145 * 4146 * <pre> 4147 * The result isn't asked for, just check for 4148 * the existence. 4149 * </pre> 4150 */ setExistenceOnly(boolean value)4151 public Builder setExistenceOnly(boolean value) { 4152 bitField0_ |= 0x00000200; 4153 existenceOnly_ = value; 4154 onChanged(); 4155 return this; 4156 } 4157 /** 4158 * <code>optional bool existence_only = 10 [default = false];</code> 4159 * 4160 * <pre> 4161 * The result isn't asked for, just check for 4162 * the existence. 4163 * </pre> 4164 */ clearExistenceOnly()4165 public Builder clearExistenceOnly() { 4166 bitField0_ = (bitField0_ & ~0x00000200); 4167 existenceOnly_ = false; 4168 onChanged(); 4169 return this; 4170 } 4171 4172 // optional bool closest_row_before = 11 [default = false]; 4173 private boolean closestRowBefore_ ; 4174 /** 4175 * <code>optional bool closest_row_before = 11 [default = false];</code> 4176 * 4177 * <pre> 4178 * If the row to get doesn't exist, return the 4179 * closest row before. 4180 * </pre> 4181 */ hasClosestRowBefore()4182 public boolean hasClosestRowBefore() { 4183 return ((bitField0_ & 0x00000400) == 0x00000400); 4184 } 4185 /** 4186 * <code>optional bool closest_row_before = 11 [default = false];</code> 4187 * 4188 * <pre> 4189 * If the row to get doesn't exist, return the 4190 * closest row before. 
// Below: rest of closest_row_before (get/set/clear); start of the `optional .Consistency consistency = 12
// [default = STRONG]` enum field (flag 0x00000800, consistency_ initialized to Consistency.STRONG;
// setConsistency rejects null with NullPointerException).
4191 * </pre> 4192 */ getClosestRowBefore()4193 public boolean getClosestRowBefore() { 4194 return closestRowBefore_; 4195 } 4196 /** 4197 * <code>optional bool closest_row_before = 11 [default = false];</code> 4198 * 4199 * <pre> 4200 * If the row to get doesn't exist, return the 4201 * closest row before. 4202 * </pre> 4203 */ setClosestRowBefore(boolean value)4204 public Builder setClosestRowBefore(boolean value) { 4205 bitField0_ |= 0x00000400; 4206 closestRowBefore_ = value; 4207 onChanged(); 4208 return this; 4209 } 4210 /** 4211 * <code>optional bool closest_row_before = 11 [default = false];</code> 4212 * 4213 * <pre> 4214 * If the row to get doesn't exist, return the 4215 * closest row before. 4216 * </pre> 4217 */ clearClosestRowBefore()4218 public Builder clearClosestRowBefore() { 4219 bitField0_ = (bitField0_ & ~0x00000400); 4220 closestRowBefore_ = false; 4221 onChanged(); 4222 return this; 4223 } 4224 4225 // optional .Consistency consistency = 12 [default = STRONG]; 4226 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 4227 /** 4228 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 4229 */ hasConsistency()4230 public boolean hasConsistency() { 4231 return ((bitField0_ & 0x00000800) == 0x00000800); 4232 } 4233 /** 4234 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 4235 */ getConsistency()4236 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() { 4237 return consistency_; 4238 } 4239 /** 4240 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 4241 */ setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value)4242 public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) { 4243 if (value == null) { 4244 throw new NullPointerException(); 4245 } 4246 
// NOTE(review): protoc-generated Java ("DO NOT EDIT", regenerate from Client.proto) rendered through a
// cross-referencing tool that collapsed newlines and embedded original line numbers and duplicate method
// signatures into the text. Comments added here only annotate the collapsed runs; the code is untouched.
// Below: end of setConsistency and clearConsistency (reset to Consistency.STRONG, flag 0x00000800). Then the
// `repeated .ColumnFamilyTimeRange cf_time_range = 13` plumbing: backing list cfTimeRange_,
// ensureCfTimeRangeIsMutable() (copy-on-first-write, guarded by flag 0x00001000), and read accessors
// getCfTimeRangeList/getCfTimeRangeCount, which delegate to cfTimeRangeBuilder_ once the lazy builder exists.
bitField0_ |= 0x00000800; 4247 consistency_ = value; 4248 onChanged(); 4249 return this; 4250 } 4251 /** 4252 * <code>optional .Consistency consistency = 12 [default = STRONG];</code> 4253 */ clearConsistency()4254 public Builder clearConsistency() { 4255 bitField0_ = (bitField0_ & ~0x00000800); 4256 consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 4257 onChanged(); 4258 return this; 4259 } 4260 4261 // repeated .ColumnFamilyTimeRange cf_time_range = 13; 4262 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ = 4263 java.util.Collections.emptyList(); ensureCfTimeRangeIsMutable()4264 private void ensureCfTimeRangeIsMutable() { 4265 if (!((bitField0_ & 0x00001000) == 0x00001000)) { 4266 cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_); 4267 bitField0_ |= 0x00001000; 4268 } 4269 } 4270 4271 private com.google.protobuf.RepeatedFieldBuilder< 4272 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_; 4273 4274 /** 4275 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4276 */ getCfTimeRangeList()4277 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { 4278 if (cfTimeRangeBuilder_ == null) { 4279 return java.util.Collections.unmodifiableList(cfTimeRange_); 4280 } else { 4281 return cfTimeRangeBuilder_.getMessageList(); 4282 } 4283 } 4284 /** 4285 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4286 */ getCfTimeRangeCount()4287 public int getCfTimeRangeCount() { 4288 if (cfTimeRangeBuilder_ == null) { 4289 return cfTimeRange_.size(); 4290 } else { 4291 return 
// Below: getCfTimeRange(index), setCfTimeRange (message/builder overloads), and the start of addCfTimeRange;
// null message values throw NullPointerException, and local-list mutations go through
// ensureCfTimeRangeIsMutable() + onChanged().
cfTimeRangeBuilder_.getCount(); 4292 } 4293 } 4294 /** 4295 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4296 */ getCfTimeRange(int index)4297 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { 4298 if (cfTimeRangeBuilder_ == null) { 4299 return cfTimeRange_.get(index); 4300 } else { 4301 return cfTimeRangeBuilder_.getMessage(index); 4302 } 4303 } 4304 /** 4305 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4306 */ setCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value)4307 public Builder setCfTimeRange( 4308 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { 4309 if (cfTimeRangeBuilder_ == null) { 4310 if (value == null) { 4311 throw new NullPointerException(); 4312 } 4313 ensureCfTimeRangeIsMutable(); 4314 cfTimeRange_.set(index, value); 4315 onChanged(); 4316 } else { 4317 cfTimeRangeBuilder_.setMessage(index, value); 4318 } 4319 return this; 4320 } 4321 /** 4322 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4323 */ setCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue)4324 public Builder setCfTimeRange( 4325 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { 4326 if (cfTimeRangeBuilder_ == null) { 4327 ensureCfTimeRangeIsMutable(); 4328 cfTimeRange_.set(index, builderForValue.build()); 4329 onChanged(); 4330 } else { 4331 cfTimeRangeBuilder_.setMessage(index, builderForValue.build()); 4332 } 4333 return this; 4334 } 4335 /** 4336 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4337 */ addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value)4338 public Builder 
// Below: the addCfTimeRange overloads — message, (index, message), builder, and the start of (index, builder).
addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { 4339 if (cfTimeRangeBuilder_ == null) { 4340 if (value == null) { 4341 throw new NullPointerException(); 4342 } 4343 ensureCfTimeRangeIsMutable(); 4344 cfTimeRange_.add(value); 4345 onChanged(); 4346 } else { 4347 cfTimeRangeBuilder_.addMessage(value); 4348 } 4349 return this; 4350 } 4351 /** 4352 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4353 */ addCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value)4354 public Builder addCfTimeRange( 4355 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { 4356 if (cfTimeRangeBuilder_ == null) { 4357 if (value == null) { 4358 throw new NullPointerException(); 4359 } 4360 ensureCfTimeRangeIsMutable(); 4361 cfTimeRange_.add(index, value); 4362 onChanged(); 4363 } else { 4364 cfTimeRangeBuilder_.addMessage(index, value); 4365 } 4366 return this; 4367 } 4368 /** 4369 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4370 */ addCfTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue)4371 public Builder addCfTimeRange( 4372 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { 4373 if (cfTimeRangeBuilder_ == null) { 4374 ensureCfTimeRangeIsMutable(); 4375 cfTimeRange_.add(builderForValue.build()); 4376 onChanged(); 4377 } else { 4378 cfTimeRangeBuilder_.addMessage(builderForValue.build()); 4379 } 4380 return this; 4381 } 4382 /** 4383 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4384 */ addCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue)4385 public Builder addCfTimeRange( 4386 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { 4387 
// Below: addAllCfTimeRange (bulk add via the inherited addAll helper), clearCfTimeRange/removeCfTimeRange,
// and getCfTimeRangeBuilder plus the start of getCfTimeRangeOrBuilder.
if (cfTimeRangeBuilder_ == null) { 4388 ensureCfTimeRangeIsMutable(); 4389 cfTimeRange_.add(index, builderForValue.build()); 4390 onChanged(); 4391 } else { 4392 cfTimeRangeBuilder_.addMessage(index, builderForValue.build()); 4393 } 4394 return this; 4395 } 4396 /** 4397 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4398 */ addAllCfTimeRange( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values)4399 public Builder addAllCfTimeRange( 4400 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) { 4401 if (cfTimeRangeBuilder_ == null) { 4402 ensureCfTimeRangeIsMutable(); 4403 super.addAll(values, cfTimeRange_); 4404 onChanged(); 4405 } else { 4406 cfTimeRangeBuilder_.addAllMessages(values); 4407 } 4408 return this; 4409 } 4410 /** 4411 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4412 */ clearCfTimeRange()4413 public Builder clearCfTimeRange() { 4414 if (cfTimeRangeBuilder_ == null) { 4415 cfTimeRange_ = java.util.Collections.emptyList(); 4416 bitField0_ = (bitField0_ & ~0x00001000); 4417 onChanged(); 4418 } else { 4419 cfTimeRangeBuilder_.clear(); 4420 } 4421 return this; 4422 } 4423 /** 4424 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4425 */ removeCfTimeRange(int index)4426 public Builder removeCfTimeRange(int index) { 4427 if (cfTimeRangeBuilder_ == null) { 4428 ensureCfTimeRangeIsMutable(); 4429 cfTimeRange_.remove(index); 4430 onChanged(); 4431 } else { 4432 cfTimeRangeBuilder_.remove(index); 4433 } 4434 return this; 4435 } 4436 /** 4437 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4438 */ getCfTimeRangeBuilder( int index)4439 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder( 4440 int index) { 4441 return getCfTimeRangeFieldBuilder().getBuilder(index); 4442 } 4443 /** 4444 * <code>repeated 
// Below: getCfTimeRangeOrBuilder, getCfTimeRangeOrBuilderList, addCfTimeRangeBuilder (with and without
// index, seeded with the ColumnFamilyTimeRange default instance), and the start of getCfTimeRangeBuilderList.
.ColumnFamilyTimeRange cf_time_range = 13;</code> 4445 */ getCfTimeRangeOrBuilder( int index)4446 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( 4447 int index) { 4448 if (cfTimeRangeBuilder_ == null) { 4449 return cfTimeRange_.get(index); } else { 4450 return cfTimeRangeBuilder_.getMessageOrBuilder(index); 4451 } 4452 } 4453 /** 4454 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4455 */ 4456 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList()4457 getCfTimeRangeOrBuilderList() { 4458 if (cfTimeRangeBuilder_ != null) { 4459 return cfTimeRangeBuilder_.getMessageOrBuilderList(); 4460 } else { 4461 return java.util.Collections.unmodifiableList(cfTimeRange_); 4462 } 4463 } 4464 /** 4465 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4466 */ addCfTimeRangeBuilder()4467 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() { 4468 return getCfTimeRangeFieldBuilder().addBuilder( 4469 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); 4470 } 4471 /** 4472 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4473 */ addCfTimeRangeBuilder( int index)4474 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder( 4475 int index) { 4476 return getCfTimeRangeFieldBuilder().addBuilder( 4477 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); 4478 } 4479 /** 4480 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 13;</code> 4481 */ 4482 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder> getCfTimeRangeBuilderList()4483 getCfTimeRangeBuilderList() { 4484 return 
// Below: end of getCfTimeRangeBuilderList and getCfTimeRangeFieldBuilder() (lazy RepeatedFieldBuilder;
// cfTimeRange_ is nulled once ownership transfers). Then the Get.Builder class ends
// (protoc_insertion_point builder_scope:Get), the static initializer creates and initializes the Get
// defaultInstance, the Get message class closes (class_scope:Get), and the ResultOrBuilder interface begins
// (its `repeated .Cell cell = 1` accessors continue past this chunk).
getCfTimeRangeFieldBuilder().getBuilderList(); 4485 } 4486 private com.google.protobuf.RepeatedFieldBuilder< 4487 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeFieldBuilder()4488 getCfTimeRangeFieldBuilder() { 4489 if (cfTimeRangeBuilder_ == null) { 4490 cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 4491 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>( 4492 cfTimeRange_, 4493 ((bitField0_ & 0x00001000) == 0x00001000), 4494 getParentForChildren(), 4495 isClean()); 4496 cfTimeRange_ = null; 4497 } 4498 return cfTimeRangeBuilder_; 4499 } 4500 4501 // @@protoc_insertion_point(builder_scope:Get) 4502 } 4503 4504 static { 4505 defaultInstance = new Get(true); defaultInstance.initFields()4506 defaultInstance.initFields(); 4507 } 4508 4509 // @@protoc_insertion_point(class_scope:Get) 4510 } 4511 4512 public interface ResultOrBuilder 4513 extends com.google.protobuf.MessageOrBuilder { 4514 4515 // repeated .Cell cell = 1; 4516 /** 4517 * <code>repeated .Cell cell = 1;</code> 4518 * 4519 * <pre> 4520 * Result includes the Cells or else it just has a count of Cells 4521 * that are carried otherwise. 4522 * </pre> 4523 */ 4524 java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList()4525 getCellList(); 4526 /** 4527 * <code>repeated .Cell cell = 1;</code> 4528 * 4529 * <pre> 4530 * Result includes the Cells or else it just has a count of Cells 4531 * that are carried otherwise. 
 * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index);
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    int getCellCount();
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
        getCellOrBuilderList();
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
        int index);

    // optional int32 associated_cell_count = 2;
    /**
     * <code>optional int32 associated_cell_count = 2;</code>
     *
     * <pre>
     * The below count is set when the associated cells are
     * not part of this protobuf message; they are passed alongside
     * and then this Message is just a placeholder with metadata.
     * The count is needed to know how many to peel off the block of Cells as
     * ours. NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    boolean hasAssociatedCellCount();
    /**
     * <code>optional int32 associated_cell_count = 2;</code>
     *
     * <pre>
     * The below count is set when the associated cells are
     * not part of this protobuf message; they are passed alongside
     * and then this Message is just a placeholder with metadata.
     * The count is needed to know how many to peel off the block of Cells as
     * ours. NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    int getAssociatedCellCount();

    // optional bool exists = 3;
    /**
     * <code>optional bool exists = 3;</code>
     *
     * <pre>
     * used for Get to check existence only. Not set if existence_only was not set to true
     * in the query.
     * </pre>
     */
    boolean hasExists();
    /**
     * <code>optional bool exists = 3;</code>
     *
     * <pre>
     * used for Get to check existence only. Not set if existence_only was not set to true
     * in the query.
     * </pre>
     */
    boolean getExists();

    // optional bool stale = 4 [default = false];
    /**
     * <code>optional bool stale = 4 [default = false];</code>
     *
     * <pre>
     * Whether or not the results are coming from possibly stale data
     * </pre>
     */
    boolean hasStale();
    /**
     * <code>optional bool stale = 4 [default = false];</code>
     *
     * <pre>
     * Whether or not the results are coming from possibly stale data
     * </pre>
     */
    boolean getStale();

    // optional bool partial = 5 [default = false];
    /**
     * <code>optional bool partial = 5 [default = false];</code>
     *
     * <pre>
     * Whether or not the entire result could be returned. Results will be split when
     * the RPC chunk size limit is reached. Partial results contain only a subset of the
     * cells for a row and must be combined with a result containing the remaining cells
     * to form a complete result
     * </pre>
     */
    boolean hasPartial();
    /**
     * <code>optional bool partial = 5 [default = false];</code>
     *
     * <pre>
     * Whether or not the entire result could be returned. Results will be split when
     * the RPC chunk size limit is reached. Partial results contain only a subset of the
     * cells for a row and must be combined with a result containing the remaining cells
     * to form a complete result
     * </pre>
     */
    boolean getPartial();
  }
  /**
   * Protobuf type {@code Result}
   */
  public static final class Result extends
      com.google.protobuf.GeneratedMessage
      implements ResultOrBuilder {
    // Use Result.newBuilder() to construct.
    private Result(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Minimal constructor used without a builder; only initializes unknownFields
    // (fields are left at their Java defaults).
    private Result(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Result defaultInstance;
    public static Result getDefaultInstance() {
      return defaultInstance;
    }

    public Result getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until end of stream (tag 0),
    // preserving unrecognized fields in unknownFields.
    private Result(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag)
{
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag = (field_number << 3) | wire_type; field 1 (cell), length-delimited.
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>();
                mutable_bitField0_ |= 0x00000001;
              }
              cell_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.PARSER, extensionRegistry));
              break;
            }
            case 16: {
              bitField0_ |= 0x00000001;
              associatedCellCount_ = input.readInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000002;
              exists_ = input.readBool();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000004;
              stale_ = input.readBool();
              break;
            }
            case 40: {
              bitField0_ |= 0x00000008;
              partial_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated field and unknown-field set even on failure.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          cell_ = java.util.Collections.unmodifiableList(cell_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
    }

    public static com.google.protobuf.Parser<Result> PARSER =
        new com.google.protobuf.AbstractParser<Result>() {
      public Result parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Result(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Result> getParserForType() {
      return PARSER;
    }

    // Presence bits for the optional scalar fields (cell is tracked by list emptiness).
    private int bitField0_;
    // repeated .Cell cell = 1;
    public static final int CELL_FIELD_NUMBER = 1;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_;
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() {
      return cell_;
    }
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>
        getCellOrBuilderList() {
      return cell_;
    }
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    public int getCellCount() {
      return cell_.size();
    }
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) {
      return cell_.get(index);
    }
    /**
     * <code>repeated .Cell cell = 1;</code>
     *
     * <pre>
     * Result includes the Cells or else it just has a count of Cells
     * that are carried otherwise.
     * </pre>
     */
    public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder(
        int index) {
      return cell_.get(index);
    }

    // optional int32 associated_cell_count = 2;
    public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 2;
    private int associatedCellCount_;
    /**
     * <code>optional int32 associated_cell_count = 2;</code>
     *
     * <pre>
     * The below count is set when the associated cells are
     * not part of this protobuf message; they are passed alongside
     * and then this Message is just a placeholder with metadata.
     * The count is needed to know how many to peel off the block of Cells as
     * ours. NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    public boolean hasAssociatedCellCount() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int32 associated_cell_count = 2;</code>
     *
     * <pre>
     * The below count is set when the associated cells are
     * not part of this protobuf message; they are passed alongside
     * and then this Message is just a placeholder with metadata.
     * The count is needed to know how many to peel off the block of Cells as
     * ours. NOTE: This is different from the pb managed cell_count of the
     * 'cell' field above which is non-null when the cells are pb'd.
     * </pre>
     */
    public int getAssociatedCellCount() {
      return associatedCellCount_;
    }

    // optional bool exists = 3;
    public static final int EXISTS_FIELD_NUMBER = 3;
    private boolean exists_;
    /**
     * <code>optional bool exists = 3;</code>
     *
     * <pre>
     * used for Get to check existence only. Not set if existence_only was not set to true
     * in the query.
     * </pre>
     */
    public boolean hasExists() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool exists = 3;</code>
     *
     * <pre>
     * used for Get to check existence only. Not set if existence_only was not set to true
     * in the query.
     * </pre>
     */
    public boolean getExists() {
      return exists_;
    }

    // optional bool stale = 4 [default = false];
    public static final int STALE_FIELD_NUMBER = 4;
    private boolean stale_;
    /**
     * <code>optional bool stale = 4 [default = false];</code>
     *
     * <pre>
     * Whether or not the results are coming from possibly stale data
     * </pre>
     */
    public boolean hasStale() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bool stale = 4 [default = false];</code>
     *
     * <pre>
     * Whether or not the results are coming from possibly stale data
     * </pre>
     */
    public boolean getStale() {
      return stale_;
    }

    // optional bool partial = 5 [default = false];
    public static final int PARTIAL_FIELD_NUMBER = 5;
    private boolean partial_;
    /**
     * <code>optional bool partial = 5 [default = false];</code>
     *
     * <pre>
     *
Whether or not the entire result could be returned. Results will be split when
     * the RPC chunk size limit is reached. Partial results contain only a subset of the
     * cells for a row and must be combined with a result containing the remaining cells
     * to form a complete result
     * </pre>
     */
    public boolean hasPartial() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool partial = 5 [default = false];</code>
     *
     * <pre>
     * Whether or not the entire result could be returned. Results will be split when
     * the RPC chunk size limit is reached. Partial results contain only a subset of the
     * cells for a row and must be combined with a result containing the remaining cells
     * to form a complete result
     * </pre>
     */
    public boolean getPartial() {
      return partial_;
    }

    private void initFields() {
      cell_ = java.util.Collections.emptyList();
      associatedCellCount_ = 0;
      exists_ = false;
      stale_ = false;
      partial_ = false;
    }
    private byte memoizedIsInitialized = -1;
    // No required fields in Result, so every instance is initialized;
    // the answer is memoized in a byte (-1 = unknown, 1 = initialized).
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of computing and caching the serialized size.
      getSerializedSize();
      for (int i = 0; i < cell_.size(); i++) {
        output.writeMessage(1, cell_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(2, associatedCellCount_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(3, exists_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBool(4, stale_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(5, partial_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < cell_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, cell_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, associatedCellCount_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(3, exists_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, stale_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(5, partial_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) obj;

      // Optional fields compare equal only when presence AND value both match.
      boolean result = true;
      result = result && getCellList()
          .equals(other.getCellList());
      result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount());
      if (hasAssociatedCellCount()) {
        result = result && (getAssociatedCellCount()
            == other.getAssociatedCellCount());
      }
      result = result && (hasExists() == other.hasExists());
      if (hasExists()) {
        result = result && (getExists()
            == other.getExists());
      }
      result = result && (hasStale() == other.hasStale());
      if (hasStale()) {
        result = result && (getStale()
            == other.getStale());
      }
      result = result && (hasPartial() == other.hasPartial());
      if (hasPartial()) {
        result = result && (getPartial()
            == other.getPartial());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getCellCount() > 0) {
        hash = (37 * hash) + CELL_FIELD_NUMBER;
        hash = (53 * hash) + getCellList().hashCode();
      }
      if (hasAssociatedCellCount()) {
        hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER;
        hash = (53 * hash) + getAssociatedCellCount();
      }
      if (hasExists()) {
        hash = (37 * hash) + EXISTS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getExists());
      }
      if (hasStale()) {
        hash = (37 * hash) + STALE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getStale());
      }
      if (hasPartial()) {
        hash = (37 * hash) + PARTIAL_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getPartial());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code Result}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getCellFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets each field to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        if (cellBuilder_ == null) {
          cell_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          cellBuilder_.clear();
        }
        associatedCellCount_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        exists_ = false;
        bitField0_ = (bitField0_ & ~0x00000004);
        stale_ = false;
bitField0_ = (bitField0_ & ~0x00000008); 5215 partial_ = false; 5216 bitField0_ = (bitField0_ & ~0x00000010); 5217 return this; 5218 } 5219 clone()5220 public Builder clone() { 5221 return create().mergeFrom(buildPartial()); 5222 } 5223 5224 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()5225 getDescriptorForType() { 5226 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Result_descriptor; 5227 } 5228 getDefaultInstanceForType()5229 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getDefaultInstanceForType() { 5230 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); 5231 } 5232 build()5233 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result build() { 5234 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = buildPartial(); 5235 if (!result.isInitialized()) { 5236 throw newUninitializedMessageException(result); 5237 } 5238 return result; 5239 } 5240 buildPartial()5241 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result buildPartial() { 5242 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result(this); 5243 int from_bitField0_ = bitField0_; 5244 int to_bitField0_ = 0; 5245 if (cellBuilder_ == null) { 5246 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5247 cell_ = java.util.Collections.unmodifiableList(cell_); 5248 bitField0_ = (bitField0_ & ~0x00000001); 5249 } 5250 result.cell_ = cell_; 5251 } else { 5252 result.cell_ = cellBuilder_.build(); 5253 } 5254 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 5255 to_bitField0_ |= 0x00000001; 5256 } 5257 result.associatedCellCount_ = associatedCellCount_; 5258 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 5259 to_bitField0_ |= 0x00000002; 5260 } 5261 result.exists_ = exists_; 5262 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 5263 to_bitField0_ |= 
0x00000004; 5264 } 5265 result.stale_ = stale_; 5266 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 5267 to_bitField0_ |= 0x00000008; 5268 } 5269 result.partial_ = partial_; 5270 result.bitField0_ = to_bitField0_; 5271 onBuilt(); 5272 return result; 5273 } 5274 mergeFrom(com.google.protobuf.Message other)5275 public Builder mergeFrom(com.google.protobuf.Message other) { 5276 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) { 5277 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result)other); 5278 } else { 5279 super.mergeFrom(other); 5280 return this; 5281 } 5282 } 5283 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other)5284 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result other) { 5285 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) return this; 5286 if (cellBuilder_ == null) { 5287 if (!other.cell_.isEmpty()) { 5288 if (cell_.isEmpty()) { 5289 cell_ = other.cell_; 5290 bitField0_ = (bitField0_ & ~0x00000001); 5291 } else { 5292 ensureCellIsMutable(); 5293 cell_.addAll(other.cell_); 5294 } 5295 onChanged(); 5296 } 5297 } else { 5298 if (!other.cell_.isEmpty()) { 5299 if (cellBuilder_.isEmpty()) { 5300 cellBuilder_.dispose(); 5301 cellBuilder_ = null; 5302 cell_ = other.cell_; 5303 bitField0_ = (bitField0_ & ~0x00000001); 5304 cellBuilder_ = 5305 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
5306 getCellFieldBuilder() : null; 5307 } else { 5308 cellBuilder_.addAllMessages(other.cell_); 5309 } 5310 } 5311 } 5312 if (other.hasAssociatedCellCount()) { 5313 setAssociatedCellCount(other.getAssociatedCellCount()); 5314 } 5315 if (other.hasExists()) { 5316 setExists(other.getExists()); 5317 } 5318 if (other.hasStale()) { 5319 setStale(other.getStale()); 5320 } 5321 if (other.hasPartial()) { 5322 setPartial(other.getPartial()); 5323 } 5324 this.mergeUnknownFields(other.getUnknownFields()); 5325 return this; 5326 } 5327 isInitialized()5328 public final boolean isInitialized() { 5329 return true; 5330 } 5331 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5332 public Builder mergeFrom( 5333 com.google.protobuf.CodedInputStream input, 5334 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5335 throws java.io.IOException { 5336 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result parsedMessage = null; 5337 try { 5338 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 5339 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5340 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result) e.getUnfinishedMessage(); 5341 throw e; 5342 } finally { 5343 if (parsedMessage != null) { 5344 mergeFrom(parsedMessage); 5345 } 5346 } 5347 return this; 5348 } 5349 private int bitField0_; 5350 5351 // repeated .Cell cell = 1; 5352 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> cell_ = 5353 java.util.Collections.emptyList(); ensureCellIsMutable()5354 private void ensureCellIsMutable() { 5355 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 5356 cell_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell>(cell_); 5357 bitField0_ |= 0x00000001; 5358 } 5359 } 5360 5361 private com.google.protobuf.RepeatedFieldBuilder< 5362 org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, 
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> cellBuilder_; 5363 5364 /** 5365 * <code>repeated .Cell cell = 1;</code> 5366 * 5367 * <pre> 5368 * Result includes the Cells or else it just has a count of Cells 5369 * that are carried otherwise. 5370 * </pre> 5371 */ getCellList()5372 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> getCellList() { 5373 if (cellBuilder_ == null) { 5374 return java.util.Collections.unmodifiableList(cell_); 5375 } else { 5376 return cellBuilder_.getMessageList(); 5377 } 5378 } 5379 /** 5380 * <code>repeated .Cell cell = 1;</code> 5381 * 5382 * <pre> 5383 * Result includes the Cells or else it just has a count of Cells 5384 * that are carried otherwise. 5385 * </pre> 5386 */ getCellCount()5387 public int getCellCount() { 5388 if (cellBuilder_ == null) { 5389 return cell_.size(); 5390 } else { 5391 return cellBuilder_.getCount(); 5392 } 5393 } 5394 /** 5395 * <code>repeated .Cell cell = 1;</code> 5396 * 5397 * <pre> 5398 * Result includes the Cells or else it just has a count of Cells 5399 * that are carried otherwise. 5400 * </pre> 5401 */ getCell(int index)5402 public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getCell(int index) { 5403 if (cellBuilder_ == null) { 5404 return cell_.get(index); 5405 } else { 5406 return cellBuilder_.getMessage(index); 5407 } 5408 } 5409 /** 5410 * <code>repeated .Cell cell = 1;</code> 5411 * 5412 * <pre> 5413 * Result includes the Cells or else it just has a count of Cells 5414 * that are carried otherwise. 
5415 * </pre> 5416 */ setCell( int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value)5417 public Builder setCell( 5418 int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) { 5419 if (cellBuilder_ == null) { 5420 if (value == null) { 5421 throw new NullPointerException(); 5422 } 5423 ensureCellIsMutable(); 5424 cell_.set(index, value); 5425 onChanged(); 5426 } else { 5427 cellBuilder_.setMessage(index, value); 5428 } 5429 return this; 5430 } 5431 /** 5432 * <code>repeated .Cell cell = 1;</code> 5433 * 5434 * <pre> 5435 * Result includes the Cells or else it just has a count of Cells 5436 * that are carried otherwise. 5437 * </pre> 5438 */ setCell( int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue)5439 public Builder setCell( 5440 int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) { 5441 if (cellBuilder_ == null) { 5442 ensureCellIsMutable(); 5443 cell_.set(index, builderForValue.build()); 5444 onChanged(); 5445 } else { 5446 cellBuilder_.setMessage(index, builderForValue.build()); 5447 } 5448 return this; 5449 } 5450 /** 5451 * <code>repeated .Cell cell = 1;</code> 5452 * 5453 * <pre> 5454 * Result includes the Cells or else it just has a count of Cells 5455 * that are carried otherwise. 5456 * </pre> 5457 */ addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value)5458 public Builder addCell(org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) { 5459 if (cellBuilder_ == null) { 5460 if (value == null) { 5461 throw new NullPointerException(); 5462 } 5463 ensureCellIsMutable(); 5464 cell_.add(value); 5465 onChanged(); 5466 } else { 5467 cellBuilder_.addMessage(value); 5468 } 5469 return this; 5470 } 5471 /** 5472 * <code>repeated .Cell cell = 1;</code> 5473 * 5474 * <pre> 5475 * Result includes the Cells or else it just has a count of Cells 5476 * that are carried otherwise. 
5477 * </pre> 5478 */ addCell( int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value)5479 public Builder addCell( 5480 int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell value) { 5481 if (cellBuilder_ == null) { 5482 if (value == null) { 5483 throw new NullPointerException(); 5484 } 5485 ensureCellIsMutable(); 5486 cell_.add(index, value); 5487 onChanged(); 5488 } else { 5489 cellBuilder_.addMessage(index, value); 5490 } 5491 return this; 5492 } 5493 /** 5494 * <code>repeated .Cell cell = 1;</code> 5495 * 5496 * <pre> 5497 * Result includes the Cells or else it just has a count of Cells 5498 * that are carried otherwise. 5499 * </pre> 5500 */ addCell( org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue)5501 public Builder addCell( 5502 org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) { 5503 if (cellBuilder_ == null) { 5504 ensureCellIsMutable(); 5505 cell_.add(builderForValue.build()); 5506 onChanged(); 5507 } else { 5508 cellBuilder_.addMessage(builderForValue.build()); 5509 } 5510 return this; 5511 } 5512 /** 5513 * <code>repeated .Cell cell = 1;</code> 5514 * 5515 * <pre> 5516 * Result includes the Cells or else it just has a count of Cells 5517 * that are carried otherwise. 5518 * </pre> 5519 */ addCell( int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue)5520 public Builder addCell( 5521 int index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder builderForValue) { 5522 if (cellBuilder_ == null) { 5523 ensureCellIsMutable(); 5524 cell_.add(index, builderForValue.build()); 5525 onChanged(); 5526 } else { 5527 cellBuilder_.addMessage(index, builderForValue.build()); 5528 } 5529 return this; 5530 } 5531 /** 5532 * <code>repeated .Cell cell = 1;</code> 5533 * 5534 * <pre> 5535 * Result includes the Cells or else it just has a count of Cells 5536 * that are carried otherwise. 
5537 * </pre> 5538 */ addAllCell( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> values)5539 public Builder addAllCell( 5540 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell> values) { 5541 if (cellBuilder_ == null) { 5542 ensureCellIsMutable(); 5543 super.addAll(values, cell_); 5544 onChanged(); 5545 } else { 5546 cellBuilder_.addAllMessages(values); 5547 } 5548 return this; 5549 } 5550 /** 5551 * <code>repeated .Cell cell = 1;</code> 5552 * 5553 * <pre> 5554 * Result includes the Cells or else it just has a count of Cells 5555 * that are carried otherwise. 5556 * </pre> 5557 */ clearCell()5558 public Builder clearCell() { 5559 if (cellBuilder_ == null) { 5560 cell_ = java.util.Collections.emptyList(); 5561 bitField0_ = (bitField0_ & ~0x00000001); 5562 onChanged(); 5563 } else { 5564 cellBuilder_.clear(); 5565 } 5566 return this; 5567 } 5568 /** 5569 * <code>repeated .Cell cell = 1;</code> 5570 * 5571 * <pre> 5572 * Result includes the Cells or else it just has a count of Cells 5573 * that are carried otherwise. 5574 * </pre> 5575 */ removeCell(int index)5576 public Builder removeCell(int index) { 5577 if (cellBuilder_ == null) { 5578 ensureCellIsMutable(); 5579 cell_.remove(index); 5580 onChanged(); 5581 } else { 5582 cellBuilder_.remove(index); 5583 } 5584 return this; 5585 } 5586 /** 5587 * <code>repeated .Cell cell = 1;</code> 5588 * 5589 * <pre> 5590 * Result includes the Cells or else it just has a count of Cells 5591 * that are carried otherwise. 5592 * </pre> 5593 */ getCellBuilder( int index)5594 public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder getCellBuilder( 5595 int index) { 5596 return getCellFieldBuilder().getBuilder(index); 5597 } 5598 /** 5599 * <code>repeated .Cell cell = 1;</code> 5600 * 5601 * <pre> 5602 * Result includes the Cells or else it just has a count of Cells 5603 * that are carried otherwise. 
5604 * </pre> 5605 */ getCellOrBuilder( int index)5606 public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder getCellOrBuilder( 5607 int index) { 5608 if (cellBuilder_ == null) { 5609 return cell_.get(index); } else { 5610 return cellBuilder_.getMessageOrBuilder(index); 5611 } 5612 } 5613 /** 5614 * <code>repeated .Cell cell = 1;</code> 5615 * 5616 * <pre> 5617 * Result includes the Cells or else it just has a count of Cells 5618 * that are carried otherwise. 5619 * </pre> 5620 */ 5621 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> getCellOrBuilderList()5622 getCellOrBuilderList() { 5623 if (cellBuilder_ != null) { 5624 return cellBuilder_.getMessageOrBuilderList(); 5625 } else { 5626 return java.util.Collections.unmodifiableList(cell_); 5627 } 5628 } 5629 /** 5630 * <code>repeated .Cell cell = 1;</code> 5631 * 5632 * <pre> 5633 * Result includes the Cells or else it just has a count of Cells 5634 * that are carried otherwise. 5635 * </pre> 5636 */ addCellBuilder()5637 public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder() { 5638 return getCellFieldBuilder().addBuilder( 5639 org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance()); 5640 } 5641 /** 5642 * <code>repeated .Cell cell = 1;</code> 5643 * 5644 * <pre> 5645 * Result includes the Cells or else it just has a count of Cells 5646 * that are carried otherwise. 5647 * </pre> 5648 */ addCellBuilder( int index)5649 public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder addCellBuilder( 5650 int index) { 5651 return getCellFieldBuilder().addBuilder( 5652 index, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.getDefaultInstance()); 5653 } 5654 /** 5655 * <code>repeated .Cell cell = 1;</code> 5656 * 5657 * <pre> 5658 * Result includes the Cells or else it just has a count of Cells 5659 * that are carried otherwise. 
5660 * </pre> 5661 */ 5662 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder> getCellBuilderList()5663 getCellBuilderList() { 5664 return getCellFieldBuilder().getBuilderList(); 5665 } 5666 private com.google.protobuf.RepeatedFieldBuilder< 5667 org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder> getCellFieldBuilder()5668 getCellFieldBuilder() { 5669 if (cellBuilder_ == null) { 5670 cellBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 5671 org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder, org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder>( 5672 cell_, 5673 ((bitField0_ & 0x00000001) == 0x00000001), 5674 getParentForChildren(), 5675 isClean()); 5676 cell_ = null; 5677 } 5678 return cellBuilder_; 5679 } 5680 5681 // optional int32 associated_cell_count = 2; 5682 private int associatedCellCount_ ; 5683 /** 5684 * <code>optional int32 associated_cell_count = 2;</code> 5685 * 5686 * <pre> 5687 * The below count is set when the associated cells are 5688 * not part of this protobuf message; they are passed alongside 5689 * and then this Message is just a placeholder with metadata. 5690 * The count is needed to know how many to peel off the block of Cells as 5691 * ours. NOTE: This is different from the pb managed cell_count of the 5692 * 'cell' field above which is non-null when the cells are pb'd. 
5693 * </pre> 5694 */ hasAssociatedCellCount()5695 public boolean hasAssociatedCellCount() { 5696 return ((bitField0_ & 0x00000002) == 0x00000002); 5697 } 5698 /** 5699 * <code>optional int32 associated_cell_count = 2;</code> 5700 * 5701 * <pre> 5702 * The below count is set when the associated cells are 5703 * not part of this protobuf message; they are passed alongside 5704 * and then this Message is just a placeholder with metadata. 5705 * The count is needed to know how many to peel off the block of Cells as 5706 * ours. NOTE: This is different from the pb managed cell_count of the 5707 * 'cell' field above which is non-null when the cells are pb'd. 5708 * </pre> 5709 */ getAssociatedCellCount()5710 public int getAssociatedCellCount() { 5711 return associatedCellCount_; 5712 } 5713 /** 5714 * <code>optional int32 associated_cell_count = 2;</code> 5715 * 5716 * <pre> 5717 * The below count is set when the associated cells are 5718 * not part of this protobuf message; they are passed alongside 5719 * and then this Message is just a placeholder with metadata. 5720 * The count is needed to know how many to peel off the block of Cells as 5721 * ours. NOTE: This is different from the pb managed cell_count of the 5722 * 'cell' field above which is non-null when the cells are pb'd. 5723 * </pre> 5724 */ setAssociatedCellCount(int value)5725 public Builder setAssociatedCellCount(int value) { 5726 bitField0_ |= 0x00000002; 5727 associatedCellCount_ = value; 5728 onChanged(); 5729 return this; 5730 } 5731 /** 5732 * <code>optional int32 associated_cell_count = 2;</code> 5733 * 5734 * <pre> 5735 * The below count is set when the associated cells are 5736 * not part of this protobuf message; they are passed alongside 5737 * and then this Message is just a placeholder with metadata. 5738 * The count is needed to know how many to peel off the block of Cells as 5739 * ours. 
NOTE: This is different from the pb managed cell_count of the 5740 * 'cell' field above which is non-null when the cells are pb'd. 5741 * </pre> 5742 */ clearAssociatedCellCount()5743 public Builder clearAssociatedCellCount() { 5744 bitField0_ = (bitField0_ & ~0x00000002); 5745 associatedCellCount_ = 0; 5746 onChanged(); 5747 return this; 5748 } 5749 5750 // optional bool exists = 3; 5751 private boolean exists_ ; 5752 /** 5753 * <code>optional bool exists = 3;</code> 5754 * 5755 * <pre> 5756 * used for Get to check existence only. Not set if existence_only was not set to true 5757 * in the query. 5758 * </pre> 5759 */ hasExists()5760 public boolean hasExists() { 5761 return ((bitField0_ & 0x00000004) == 0x00000004); 5762 } 5763 /** 5764 * <code>optional bool exists = 3;</code> 5765 * 5766 * <pre> 5767 * used for Get to check existence only. Not set if existence_only was not set to true 5768 * in the query. 5769 * </pre> 5770 */ getExists()5771 public boolean getExists() { 5772 return exists_; 5773 } 5774 /** 5775 * <code>optional bool exists = 3;</code> 5776 * 5777 * <pre> 5778 * used for Get to check existence only. Not set if existence_only was not set to true 5779 * in the query. 5780 * </pre> 5781 */ setExists(boolean value)5782 public Builder setExists(boolean value) { 5783 bitField0_ |= 0x00000004; 5784 exists_ = value; 5785 onChanged(); 5786 return this; 5787 } 5788 /** 5789 * <code>optional bool exists = 3;</code> 5790 * 5791 * <pre> 5792 * used for Get to check existence only. Not set if existence_only was not set to true 5793 * in the query. 
5794 * </pre> 5795 */ clearExists()5796 public Builder clearExists() { 5797 bitField0_ = (bitField0_ & ~0x00000004); 5798 exists_ = false; 5799 onChanged(); 5800 return this; 5801 } 5802 5803 // optional bool stale = 4 [default = false]; 5804 private boolean stale_ ; 5805 /** 5806 * <code>optional bool stale = 4 [default = false];</code> 5807 * 5808 * <pre> 5809 * Whether or not the results are coming from possibly stale data 5810 * </pre> 5811 */ hasStale()5812 public boolean hasStale() { 5813 return ((bitField0_ & 0x00000008) == 0x00000008); 5814 } 5815 /** 5816 * <code>optional bool stale = 4 [default = false];</code> 5817 * 5818 * <pre> 5819 * Whether or not the results are coming from possibly stale data 5820 * </pre> 5821 */ getStale()5822 public boolean getStale() { 5823 return stale_; 5824 } 5825 /** 5826 * <code>optional bool stale = 4 [default = false];</code> 5827 * 5828 * <pre> 5829 * Whether or not the results are coming from possibly stale data 5830 * </pre> 5831 */ setStale(boolean value)5832 public Builder setStale(boolean value) { 5833 bitField0_ |= 0x00000008; 5834 stale_ = value; 5835 onChanged(); 5836 return this; 5837 } 5838 /** 5839 * <code>optional bool stale = 4 [default = false];</code> 5840 * 5841 * <pre> 5842 * Whether or not the results are coming from possibly stale data 5843 * </pre> 5844 */ clearStale()5845 public Builder clearStale() { 5846 bitField0_ = (bitField0_ & ~0x00000008); 5847 stale_ = false; 5848 onChanged(); 5849 return this; 5850 } 5851 5852 // optional bool partial = 5 [default = false]; 5853 private boolean partial_ ; 5854 /** 5855 * <code>optional bool partial = 5 [default = false];</code> 5856 * 5857 * <pre> 5858 * Whether or not the entire result could be returned. Results will be split when 5859 * the RPC chunk size limit is reached. 
Partial results contain only a subset of the 5860 * cells for a row and must be combined with a result containing the remaining cells 5861 * to form a complete result 5862 * </pre> 5863 */ hasPartial()5864 public boolean hasPartial() { 5865 return ((bitField0_ & 0x00000010) == 0x00000010); 5866 } 5867 /** 5868 * <code>optional bool partial = 5 [default = false];</code> 5869 * 5870 * <pre> 5871 * Whether or not the entire result could be returned. Results will be split when 5872 * the RPC chunk size limit is reached. Partial results contain only a subset of the 5873 * cells for a row and must be combined with a result containing the remaining cells 5874 * to form a complete result 5875 * </pre> 5876 */ getPartial()5877 public boolean getPartial() { 5878 return partial_; 5879 } 5880 /** 5881 * <code>optional bool partial = 5 [default = false];</code> 5882 * 5883 * <pre> 5884 * Whether or not the entire result could be returned. Results will be split when 5885 * the RPC chunk size limit is reached. Partial results contain only a subset of the 5886 * cells for a row and must be combined with a result containing the remaining cells 5887 * to form a complete result 5888 * </pre> 5889 */ setPartial(boolean value)5890 public Builder setPartial(boolean value) { 5891 bitField0_ |= 0x00000010; 5892 partial_ = value; 5893 onChanged(); 5894 return this; 5895 } 5896 /** 5897 * <code>optional bool partial = 5 [default = false];</code> 5898 * 5899 * <pre> 5900 * Whether or not the entire result could be returned. Results will be split when 5901 * the RPC chunk size limit is reached. 
Partial results contain only a subset of the 5902 * cells for a row and must be combined with a result containing the remaining cells 5903 * to form a complete result 5904 * </pre> 5905 */ clearPartial()5906 public Builder clearPartial() { 5907 bitField0_ = (bitField0_ & ~0x00000010); 5908 partial_ = false; 5909 onChanged(); 5910 return this; 5911 } 5912 5913 // @@protoc_insertion_point(builder_scope:Result) 5914 } 5915 5916 static { 5917 defaultInstance = new Result(true); defaultInstance.initFields()5918 defaultInstance.initFields(); 5919 } 5920 5921 // @@protoc_insertion_point(class_scope:Result) 5922 } 5923 5924 public interface GetRequestOrBuilder 5925 extends com.google.protobuf.MessageOrBuilder { 5926 5927 // required .RegionSpecifier region = 1; 5928 /** 5929 * <code>required .RegionSpecifier region = 1;</code> 5930 */ hasRegion()5931 boolean hasRegion(); 5932 /** 5933 * <code>required .RegionSpecifier region = 1;</code> 5934 */ getRegion()5935 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); 5936 /** 5937 * <code>required .RegionSpecifier region = 1;</code> 5938 */ getRegionOrBuilder()5939 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); 5940 5941 // required .Get get = 2; 5942 /** 5943 * <code>required .Get get = 2;</code> 5944 */ hasGet()5945 boolean hasGet(); 5946 /** 5947 * <code>required .Get get = 2;</code> 5948 */ getGet()5949 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet(); 5950 /** 5951 * <code>required .Get get = 2;</code> 5952 */ getGetOrBuilder()5953 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder(); 5954 } 5955 /** 5956 * Protobuf type {@code GetRequest} 5957 * 5958 * <pre> 5959 ** 5960 * The get request. Perform a single Get operation. 
5961 * </pre> 5962 */ 5963 public static final class GetRequest extends 5964 com.google.protobuf.GeneratedMessage 5965 implements GetRequestOrBuilder { 5966 // Use GetRequest.newBuilder() to construct. GetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)5967 private GetRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 5968 super(builder); 5969 this.unknownFields = builder.getUnknownFields(); 5970 } GetRequest(boolean noInit)5971 private GetRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 5972 5973 private static final GetRequest defaultInstance; getDefaultInstance()5974 public static GetRequest getDefaultInstance() { 5975 return defaultInstance; 5976 } 5977 getDefaultInstanceForType()5978 public GetRequest getDefaultInstanceForType() { 5979 return defaultInstance; 5980 } 5981 5982 private final com.google.protobuf.UnknownFieldSet unknownFields; 5983 @java.lang.Override 5984 public final com.google.protobuf.UnknownFieldSet getUnknownFields()5985 getUnknownFields() { 5986 return this.unknownFields; 5987 } GetRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5988 private GetRequest( 5989 com.google.protobuf.CodedInputStream input, 5990 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5991 throws com.google.protobuf.InvalidProtocolBufferException { 5992 initFields(); 5993 int mutable_bitField0_ = 0; 5994 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 5995 com.google.protobuf.UnknownFieldSet.newBuilder(); 5996 try { 5997 boolean done = false; 5998 while (!done) { 5999 int tag = input.readTag(); 6000 switch (tag) { 6001 case 0: 6002 done = true; 6003 break; 6004 default: { 6005 if (!parseUnknownField(input, unknownFields, 6006 extensionRegistry, tag)) { 6007 done = true; 6008 } 6009 break; 6010 } 6011 case 10: { 6012 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder 
              subBuilder = null;
              // Field 1 (region): if region was already parsed, merge the new
              // occurrence into the existing value (standard protobuf merge
              // semantics for a repeated occurrence of a singular message field).
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (get), wire type 2: same merge-if-already-present handling.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = get_.toBuilder();
              }
              get_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(get_);
                get_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-parsed message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze parsed state, even on failure, so the unfinished
        // message attached above is immutable and safe to read.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the descriptor for the {@code GetRequest} message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
    }

    // Stateless parser singleton; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<GetRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetRequest>() {
      public GetRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetRequest> getParserForType() {
      return PARSER;
    }

    // Presence bits: 0x1 = region, 0x2 = get.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // required .Get get = 2;
    public static final int GET_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_;
    /**
     * <code>required .Get get = 2;</code>
     */
    public boolean hasGet() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .Get get = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
      return get_;
    }
    /**
     * <code>required .Get get = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
      return get_;
    }

    // Reset both fields to their default instances (never null).
    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Both fields are 'required': presence and recursive initialization.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasGet()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getGet().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of caching sizes needed during writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, get_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, get_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) obj;

      // Fields compare equal when presence matches and, if present, values match.
      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && (hasGet() == other.hasGet());
      if (hasGet()) {
        result = result && getGet()
            .equals(other.getGet());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasGet()) {
        hash = (37 * hash) + GET_FIELD_NUMBER;
        hash = (53 * hash) + getGet().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetRequest}
     *
     * <pre>
     **
     * The get request. Perform a single Get operation.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Eagerly create sub-builders when nested-builder support is enabled.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getGetFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        // For each field, reset either the raw value or its sub-builder,
        // then drop the presence bit.
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (getBuilder_ == null) {
          get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = buildPartial();
        // build() enforces required fields; buildPartial() does not.
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (getBuilder_ == null) {
          result.get_ = get_;
        } else {
          result.get_ = getBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest other) {
        // Merging the default instance is a no-op.
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasGet()) {
          mergeGet(other.getGet());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!hasGet()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        if (!getGet().isInitialized()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure (merged in finally).
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      // Invariant: exactly one of region_ / regionBuilder_ is authoritative;
      // once regionBuilder_ is created, region_ is nulled and unused.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Merge only when a non-default value is already present; otherwise
          // just adopt the incoming value.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // required .Get get = 2;
      // Same single-field-builder pattern as 'region' above.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> getBuilder_;
      /**
       * <code>required .Get get = 2;</code>
       */
      public boolean hasGet() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet() {
        if (getBuilder_ == null) {
          return get_;
        } else {
          return getBuilder_.getMessage();
        }
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder setGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          get_ = value;
          onChanged();
        } else {
          getBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder setGet(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder builderForValue) {
        if (getBuilder_ == null) {
          get_ = builderForValue.build();
          onChanged();
        } else {
          getBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get value) {
        if (getBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              get_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance()) {
            get_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial();
          } else {
            get_ = value;
          }
          onChanged();
        } else {
          getBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public Builder clearGet() {
        if (getBuilder_ == null) {
          get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
          onChanged();
        } else {
          getBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder getGetBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getGetFieldBuilder().getBuilder();
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder() {
        if (getBuilder_ != null) {
          return getBuilder_.getMessageOrBuilder();
        } else {
          return get_;
        }
      }
      /**
       * <code>required .Get get = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder> 
          getGetFieldBuilder() {
        if (getBuilder_ == null) {
          getBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder>(
                  get_,
                  getParentForChildren(),
                  isClean());
          get_ = null;
        }
        return getBuilder_;
      }

      // @@protoc_insertion_point(builder_scope:GetRequest)
    }

    static {
      defaultInstance = new GetRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:GetRequest)
  }

  public interface GetResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .Result result = 1;
    /**
     * <code>optional .Result result = 1;</code>
     */
    boolean hasResult();
    /**
     *
 <code>optional .Result result = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult();
    /**
     * <code>optional .Result result = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder();
  }
  /**
   * Protobuf type {@code GetResponse}
   */
  public static final class GetResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetResponseOrBuilder {
    // Use GetResponse.newBuilder() to construct.
    private GetResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path used only for the static default instance.
    private GetResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetResponse defaultInstance;
    public static GetResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: consumes the stream until tag 0 (end) or an
    // unparseable field; unknown fields are preserved, not dropped.
    private GetResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (result): merge into any previously-seen value.
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = result_.toBuilder();
              }
              result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(result_);
                result_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
    }

    public static com.google.protobuf.Parser<GetResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetResponse>() {
      public GetResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetResponse> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional .Result result = 1;
    public static final int RESULT_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_;
    /**
     * <code>optional .Result result = 1;</code>
     */
    public boolean hasResult() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .Result result = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
      return result_;
    }
    /**
     * <code>optional .Result result = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
      return result_;
    }

    private void initFields() {
      result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // No required fields, so any instance is initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, result_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, result_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) obj;

      boolean result = true;
      result = result && (hasResult() == other.hasResult());
      if (hasResult()) {
        result = result && getResult()
            .equals(other.getResult());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasResult()) {
        hash = (37 * hash) + RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getResult().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getResultFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (resultBuilder_ == null) {
          result_ =
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); 7043 } else { 7044 resultBuilder_.clear(); 7045 } 7046 bitField0_ = (bitField0_ & ~0x00000001); 7047 return this; 7048 } 7049 clone()7050 public Builder clone() { 7051 return create().mergeFrom(buildPartial()); 7052 } 7053 7054 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()7055 getDescriptorForType() { 7056 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_GetResponse_descriptor; 7057 } 7058 getDefaultInstanceForType()7059 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse getDefaultInstanceForType() { 7060 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance(); 7061 } 7062 build()7063 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse build() { 7064 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = buildPartial(); 7065 if (!result.isInitialized()) { 7066 throw newUninitializedMessageException(result); 7067 } 7068 return result; 7069 } 7070 buildPartial()7071 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse buildPartial() { 7072 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse(this); 7073 int from_bitField0_ = bitField0_; 7074 int to_bitField0_ = 0; 7075 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 7076 to_bitField0_ |= 0x00000001; 7077 } 7078 if (resultBuilder_ == null) { 7079 result.result_ = result_; 7080 } else { 7081 result.result_ = resultBuilder_.build(); 7082 } 7083 result.bitField0_ = to_bitField0_; 7084 onBuilt(); 7085 return result; 7086 } 7087 mergeFrom(com.google.protobuf.Message other)7088 public Builder mergeFrom(com.google.protobuf.Message other) { 7089 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) { 7090 return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse)other); 7091 } else { 7092 super.mergeFrom(other); 7093 return this; 7094 } 7095 } 7096 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other)7097 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse other) { 7098 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse.getDefaultInstance()) return this; 7099 if (other.hasResult()) { 7100 mergeResult(other.getResult()); 7101 } 7102 this.mergeUnknownFields(other.getUnknownFields()); 7103 return this; 7104 } 7105 isInitialized()7106 public final boolean isInitialized() { 7107 return true; 7108 } 7109 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7110 public Builder mergeFrom( 7111 com.google.protobuf.CodedInputStream input, 7112 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7113 throws java.io.IOException { 7114 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse parsedMessage = null; 7115 try { 7116 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 7117 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7118 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse) e.getUnfinishedMessage(); 7119 throw e; 7120 } finally { 7121 if (parsedMessage != null) { 7122 mergeFrom(parsedMessage); 7123 } 7124 } 7125 return this; 7126 } 7127 private int bitField0_; 7128 7129 // optional .Result result = 1; 7130 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); 7131 private com.google.protobuf.SingleFieldBuilder< 7132 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_; 7133 /** 7134 * <code>optional .Result result = 1;</code> 7135 */ hasResult()7136 public boolean hasResult() { 7137 return ((bitField0_ & 0x00000001) == 0x00000001); 7138 } 7139 /** 7140 * <code>optional .Result result = 1;</code> 7141 */ getResult()7142 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { 7143 if (resultBuilder_ == null) { 7144 return result_; 7145 } else { 7146 return resultBuilder_.getMessage(); 7147 } 7148 } 7149 /** 7150 * <code>optional .Result result = 1;</code> 7151 */ setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value)7152 public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { 7153 if (resultBuilder_ == null) { 7154 if (value == null) { 7155 throw new NullPointerException(); 7156 } 7157 result_ = value; 7158 onChanged(); 7159 } else { 7160 resultBuilder_.setMessage(value); 7161 } 7162 bitField0_ |= 0x00000001; 7163 return this; 7164 } 7165 /** 7166 * <code>optional .Result result = 1;</code> 7167 */ setResult( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue)7168 public Builder setResult( 7169 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) { 7170 if (resultBuilder_ == null) { 7171 result_ = builderForValue.build(); 7172 onChanged(); 7173 } else { 7174 resultBuilder_.setMessage(builderForValue.build()); 7175 } 7176 bitField0_ |= 0x00000001; 7177 return this; 7178 } 7179 /** 7180 * <code>optional .Result result = 1;</code> 7181 */ mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value)7182 public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) { 7183 if (resultBuilder_ == null) { 7184 if (((bitField0_ & 0x00000001) == 0x00000001) && 7185 result_ != 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) { 7186 result_ = 7187 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); 7188 } else { 7189 result_ = value; 7190 } 7191 onChanged(); 7192 } else { 7193 resultBuilder_.mergeFrom(value); 7194 } 7195 bitField0_ |= 0x00000001; 7196 return this; 7197 } 7198 /** 7199 * <code>optional .Result result = 1;</code> 7200 */ clearResult()7201 public Builder clearResult() { 7202 if (resultBuilder_ == null) { 7203 result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); 7204 onChanged(); 7205 } else { 7206 resultBuilder_.clear(); 7207 } 7208 bitField0_ = (bitField0_ & ~0x00000001); 7209 return this; 7210 } 7211 /** 7212 * <code>optional .Result result = 1;</code> 7213 */ getResultBuilder()7214 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() { 7215 bitField0_ |= 0x00000001; 7216 onChanged(); 7217 return getResultFieldBuilder().getBuilder(); 7218 } 7219 /** 7220 * <code>optional .Result result = 1;</code> 7221 */ getResultOrBuilder()7222 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { 7223 if (resultBuilder_ != null) { 7224 return resultBuilder_.getMessageOrBuilder(); 7225 } else { 7226 return result_; 7227 } 7228 } 7229 /** 7230 * <code>optional .Result result = 1;</code> 7231 */ 7232 private com.google.protobuf.SingleFieldBuilder< 7233 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultFieldBuilder()7234 getResultFieldBuilder() { 7235 if (resultBuilder_ == null) { 7236 resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< 7237 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>( 7238 result_, 7239 getParentForChildren(), 7240 isClean()); 7241 result_ = null; 7242 } 7243 return resultBuilder_; 7244 } 7245 7246 // @@protoc_insertion_point(builder_scope:GetResponse) 7247 } 7248 7249 static { 7250 defaultInstance = new GetResponse(true); defaultInstance.initFields()7251 defaultInstance.initFields(); 7252 } 7253 7254 // @@protoc_insertion_point(class_scope:GetResponse) 7255 } 7256 7257 public interface ConditionOrBuilder 7258 extends com.google.protobuf.MessageOrBuilder { 7259 7260 // required bytes row = 1; 7261 /** 7262 * <code>required bytes row = 1;</code> 7263 */ hasRow()7264 boolean hasRow(); 7265 /** 7266 * <code>required bytes row = 1;</code> 7267 */ getRow()7268 com.google.protobuf.ByteString getRow(); 7269 7270 // required bytes family = 2; 7271 /** 7272 * <code>required bytes family = 2;</code> 7273 */ hasFamily()7274 boolean hasFamily(); 7275 /** 7276 * <code>required bytes family = 2;</code> 7277 */ getFamily()7278 com.google.protobuf.ByteString getFamily(); 7279 7280 // required bytes qualifier = 3; 7281 /** 7282 * <code>required bytes qualifier = 3;</code> 7283 */ hasQualifier()7284 boolean hasQualifier(); 7285 /** 7286 * <code>required bytes qualifier = 3;</code> 7287 */ getQualifier()7288 com.google.protobuf.ByteString getQualifier(); 7289 7290 // required .CompareType compare_type = 4; 7291 /** 7292 * <code>required .CompareType compare_type = 4;</code> 7293 */ hasCompareType()7294 boolean hasCompareType(); 7295 /** 7296 * <code>required .CompareType compare_type = 4;</code> 7297 */ getCompareType()7298 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType(); 7299 7300 // required .Comparator comparator = 5; 7301 /** 7302 * <code>required .Comparator comparator = 5;</code> 7303 */ hasComparator()7304 boolean hasComparator(); 7305 /** 7306 * <code>required 
.Comparator comparator = 5;</code> 7307 */ getComparator()7308 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); 7309 /** 7310 * <code>required .Comparator comparator = 5;</code> 7311 */ getComparatorOrBuilder()7312 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); 7313 } 7314 /** 7315 * Protobuf type {@code Condition} 7316 * 7317 * <pre> 7318 ** 7319 * Condition to check if the value of a given cell (row, 7320 * family, qualifier) matches a value via a given comparator. 7321 * 7322 * Condition is used in check and mutate operations. 7323 * </pre> 7324 */ 7325 public static final class Condition extends 7326 com.google.protobuf.GeneratedMessage 7327 implements ConditionOrBuilder { 7328 // Use Condition.newBuilder() to construct. Condition(com.google.protobuf.GeneratedMessage.Builder<?> builder)7329 private Condition(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 7330 super(builder); 7331 this.unknownFields = builder.getUnknownFields(); 7332 } Condition(boolean noInit)7333 private Condition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 7334 7335 private static final Condition defaultInstance; getDefaultInstance()7336 public static Condition getDefaultInstance() { 7337 return defaultInstance; 7338 } 7339 getDefaultInstanceForType()7340 public Condition getDefaultInstanceForType() { 7341 return defaultInstance; 7342 } 7343 7344 private final com.google.protobuf.UnknownFieldSet unknownFields; 7345 @java.lang.Override 7346 public final com.google.protobuf.UnknownFieldSet getUnknownFields()7347 getUnknownFields() { 7348 return this.unknownFields; 7349 } Condition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7350 private Condition( 7351 com.google.protobuf.CodedInputStream input, 7352 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7353 throws 
com.google.protobuf.InvalidProtocolBufferException { 7354 initFields(); 7355 int mutable_bitField0_ = 0; 7356 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 7357 com.google.protobuf.UnknownFieldSet.newBuilder(); 7358 try { 7359 boolean done = false; 7360 while (!done) { 7361 int tag = input.readTag(); 7362 switch (tag) { 7363 case 0: 7364 done = true; 7365 break; 7366 default: { 7367 if (!parseUnknownField(input, unknownFields, 7368 extensionRegistry, tag)) { 7369 done = true; 7370 } 7371 break; 7372 } 7373 case 10: { 7374 bitField0_ |= 0x00000001; 7375 row_ = input.readBytes(); 7376 break; 7377 } 7378 case 18: { 7379 bitField0_ |= 0x00000002; 7380 family_ = input.readBytes(); 7381 break; 7382 } 7383 case 26: { 7384 bitField0_ |= 0x00000004; 7385 qualifier_ = input.readBytes(); 7386 break; 7387 } 7388 case 32: { 7389 int rawValue = input.readEnum(); 7390 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); 7391 if (value == null) { 7392 unknownFields.mergeVarintField(4, rawValue); 7393 } else { 7394 bitField0_ |= 0x00000008; 7395 compareType_ = value; 7396 } 7397 break; 7398 } 7399 case 42: { 7400 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null; 7401 if (((bitField0_ & 0x00000010) == 0x00000010)) { 7402 subBuilder = comparator_.toBuilder(); 7403 } 7404 comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry); 7405 if (subBuilder != null) { 7406 subBuilder.mergeFrom(comparator_); 7407 comparator_ = subBuilder.buildPartial(); 7408 } 7409 bitField0_ |= 0x00000010; 7410 break; 7411 } 7412 } 7413 } 7414 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7415 throw e.setUnfinishedMessage(this); 7416 } catch (java.io.IOException e) { 7417 throw new com.google.protobuf.InvalidProtocolBufferException( 7418 
e.getMessage()).setUnfinishedMessage(this); 7419 } finally { 7420 this.unknownFields = unknownFields.build(); 7421 makeExtensionsImmutable(); 7422 } 7423 } 7424 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7425 getDescriptor() { 7426 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; 7427 } 7428 7429 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7430 internalGetFieldAccessorTable() { 7431 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable 7432 .ensureFieldAccessorsInitialized( 7433 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); 7434 } 7435 7436 public static com.google.protobuf.Parser<Condition> PARSER = 7437 new com.google.protobuf.AbstractParser<Condition>() { 7438 public Condition parsePartialFrom( 7439 com.google.protobuf.CodedInputStream input, 7440 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7441 throws com.google.protobuf.InvalidProtocolBufferException { 7442 return new Condition(input, extensionRegistry); 7443 } 7444 }; 7445 7446 @java.lang.Override getParserForType()7447 public com.google.protobuf.Parser<Condition> getParserForType() { 7448 return PARSER; 7449 } 7450 7451 private int bitField0_; 7452 // required bytes row = 1; 7453 public static final int ROW_FIELD_NUMBER = 1; 7454 private com.google.protobuf.ByteString row_; 7455 /** 7456 * <code>required bytes row = 1;</code> 7457 */ hasRow()7458 public boolean hasRow() { 7459 return ((bitField0_ & 0x00000001) == 0x00000001); 7460 } 7461 /** 7462 * <code>required bytes row = 1;</code> 7463 */ getRow()7464 public com.google.protobuf.ByteString getRow() { 7465 return row_; 7466 } 7467 7468 // required bytes family = 2; 7469 public static final int FAMILY_FIELD_NUMBER = 2; 7470 private 
com.google.protobuf.ByteString family_; 7471 /** 7472 * <code>required bytes family = 2;</code> 7473 */ hasFamily()7474 public boolean hasFamily() { 7475 return ((bitField0_ & 0x00000002) == 0x00000002); 7476 } 7477 /** 7478 * <code>required bytes family = 2;</code> 7479 */ getFamily()7480 public com.google.protobuf.ByteString getFamily() { 7481 return family_; 7482 } 7483 7484 // required bytes qualifier = 3; 7485 public static final int QUALIFIER_FIELD_NUMBER = 3; 7486 private com.google.protobuf.ByteString qualifier_; 7487 /** 7488 * <code>required bytes qualifier = 3;</code> 7489 */ hasQualifier()7490 public boolean hasQualifier() { 7491 return ((bitField0_ & 0x00000004) == 0x00000004); 7492 } 7493 /** 7494 * <code>required bytes qualifier = 3;</code> 7495 */ getQualifier()7496 public com.google.protobuf.ByteString getQualifier() { 7497 return qualifier_; 7498 } 7499 7500 // required .CompareType compare_type = 4; 7501 public static final int COMPARE_TYPE_FIELD_NUMBER = 4; 7502 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_; 7503 /** 7504 * <code>required .CompareType compare_type = 4;</code> 7505 */ hasCompareType()7506 public boolean hasCompareType() { 7507 return ((bitField0_ & 0x00000008) == 0x00000008); 7508 } 7509 /** 7510 * <code>required .CompareType compare_type = 4;</code> 7511 */ getCompareType()7512 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { 7513 return compareType_; 7514 } 7515 7516 // required .Comparator comparator = 5; 7517 public static final int COMPARATOR_FIELD_NUMBER = 5; 7518 private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; 7519 /** 7520 * <code>required .Comparator comparator = 5;</code> 7521 */ hasComparator()7522 public boolean hasComparator() { 7523 return ((bitField0_ & 0x00000010) == 0x00000010); 7524 } 7525 /** 7526 * <code>required .Comparator comparator = 5;</code> 7527 */ getComparator()7528 public 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { 7529 return comparator_; 7530 } 7531 /** 7532 * <code>required .Comparator comparator = 5;</code> 7533 */ getComparatorOrBuilder()7534 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { 7535 return comparator_; 7536 } 7537 initFields()7538 private void initFields() { 7539 row_ = com.google.protobuf.ByteString.EMPTY; 7540 family_ = com.google.protobuf.ByteString.EMPTY; 7541 qualifier_ = com.google.protobuf.ByteString.EMPTY; 7542 compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 7543 comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 7544 } 7545 private byte memoizedIsInitialized = -1; isInitialized()7546 public final boolean isInitialized() { 7547 byte isInitialized = memoizedIsInitialized; 7548 if (isInitialized != -1) return isInitialized == 1; 7549 7550 if (!hasRow()) { 7551 memoizedIsInitialized = 0; 7552 return false; 7553 } 7554 if (!hasFamily()) { 7555 memoizedIsInitialized = 0; 7556 return false; 7557 } 7558 if (!hasQualifier()) { 7559 memoizedIsInitialized = 0; 7560 return false; 7561 } 7562 if (!hasCompareType()) { 7563 memoizedIsInitialized = 0; 7564 return false; 7565 } 7566 if (!hasComparator()) { 7567 memoizedIsInitialized = 0; 7568 return false; 7569 } 7570 if (!getComparator().isInitialized()) { 7571 memoizedIsInitialized = 0; 7572 return false; 7573 } 7574 memoizedIsInitialized = 1; 7575 return true; 7576 } 7577 writeTo(com.google.protobuf.CodedOutputStream output)7578 public void writeTo(com.google.protobuf.CodedOutputStream output) 7579 throws java.io.IOException { 7580 getSerializedSize(); 7581 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7582 output.writeBytes(1, row_); 7583 } 7584 if (((bitField0_ & 0x00000002) == 0x00000002)) { 7585 output.writeBytes(2, family_); 7586 } 7587 if (((bitField0_ & 
0x00000004) == 0x00000004)) { 7588 output.writeBytes(3, qualifier_); 7589 } 7590 if (((bitField0_ & 0x00000008) == 0x00000008)) { 7591 output.writeEnum(4, compareType_.getNumber()); 7592 } 7593 if (((bitField0_ & 0x00000010) == 0x00000010)) { 7594 output.writeMessage(5, comparator_); 7595 } 7596 getUnknownFields().writeTo(output); 7597 } 7598 7599 private int memoizedSerializedSize = -1; getSerializedSize()7600 public int getSerializedSize() { 7601 int size = memoizedSerializedSize; 7602 if (size != -1) return size; 7603 7604 size = 0; 7605 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7606 size += com.google.protobuf.CodedOutputStream 7607 .computeBytesSize(1, row_); 7608 } 7609 if (((bitField0_ & 0x00000002) == 0x00000002)) { 7610 size += com.google.protobuf.CodedOutputStream 7611 .computeBytesSize(2, family_); 7612 } 7613 if (((bitField0_ & 0x00000004) == 0x00000004)) { 7614 size += com.google.protobuf.CodedOutputStream 7615 .computeBytesSize(3, qualifier_); 7616 } 7617 if (((bitField0_ & 0x00000008) == 0x00000008)) { 7618 size += com.google.protobuf.CodedOutputStream 7619 .computeEnumSize(4, compareType_.getNumber()); 7620 } 7621 if (((bitField0_ & 0x00000010) == 0x00000010)) { 7622 size += com.google.protobuf.CodedOutputStream 7623 .computeMessageSize(5, comparator_); 7624 } 7625 size += getUnknownFields().getSerializedSize(); 7626 memoizedSerializedSize = size; 7627 return size; 7628 } 7629 7630 private static final long serialVersionUID = 0L; 7631 @java.lang.Override writeReplace()7632 protected java.lang.Object writeReplace() 7633 throws java.io.ObjectStreamException { 7634 return super.writeReplace(); 7635 } 7636 7637 @java.lang.Override equals(final java.lang.Object obj)7638 public boolean equals(final java.lang.Object obj) { 7639 if (obj == this) { 7640 return true; 7641 } 7642 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)) { 7643 return super.equals(obj); 7644 } 7645 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) obj; 7646 7647 boolean result = true; 7648 result = result && (hasRow() == other.hasRow()); 7649 if (hasRow()) { 7650 result = result && getRow() 7651 .equals(other.getRow()); 7652 } 7653 result = result && (hasFamily() == other.hasFamily()); 7654 if (hasFamily()) { 7655 result = result && getFamily() 7656 .equals(other.getFamily()); 7657 } 7658 result = result && (hasQualifier() == other.hasQualifier()); 7659 if (hasQualifier()) { 7660 result = result && getQualifier() 7661 .equals(other.getQualifier()); 7662 } 7663 result = result && (hasCompareType() == other.hasCompareType()); 7664 if (hasCompareType()) { 7665 result = result && 7666 (getCompareType() == other.getCompareType()); 7667 } 7668 result = result && (hasComparator() == other.hasComparator()); 7669 if (hasComparator()) { 7670 result = result && getComparator() 7671 .equals(other.getComparator()); 7672 } 7673 result = result && 7674 getUnknownFields().equals(other.getUnknownFields()); 7675 return result; 7676 } 7677 7678 private int memoizedHashCode = 0; 7679 @java.lang.Override hashCode()7680 public int hashCode() { 7681 if (memoizedHashCode != 0) { 7682 return memoizedHashCode; 7683 } 7684 int hash = 41; 7685 hash = (19 * hash) + getDescriptorForType().hashCode(); 7686 if (hasRow()) { 7687 hash = (37 * hash) + ROW_FIELD_NUMBER; 7688 hash = (53 * hash) + getRow().hashCode(); 7689 } 7690 if (hasFamily()) { 7691 hash = (37 * hash) + FAMILY_FIELD_NUMBER; 7692 hash = (53 * hash) + getFamily().hashCode(); 7693 } 7694 if (hasQualifier()) { 7695 hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; 7696 hash = (53 * hash) + getQualifier().hashCode(); 7697 } 7698 if (hasCompareType()) { 7699 hash = (37 * hash) + COMPARE_TYPE_FIELD_NUMBER; 7700 hash = (53 * hash) + hashEnum(getCompareType()); 7701 } 7702 if (hasComparator()) { 7703 hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; 
7704 hash = (53 * hash) + getComparator().hashCode(); 7705 } 7706 hash = (29 * hash) + getUnknownFields().hashCode(); 7707 memoizedHashCode = hash; 7708 return hash; 7709 } 7710 parseFrom( com.google.protobuf.ByteString data)7711 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( 7712 com.google.protobuf.ByteString data) 7713 throws com.google.protobuf.InvalidProtocolBufferException { 7714 return PARSER.parseFrom(data); 7715 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7716 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( 7717 com.google.protobuf.ByteString data, 7718 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7719 throws com.google.protobuf.InvalidProtocolBufferException { 7720 return PARSER.parseFrom(data, extensionRegistry); 7721 } parseFrom(byte[] data)7722 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(byte[] data) 7723 throws com.google.protobuf.InvalidProtocolBufferException { 7724 return PARSER.parseFrom(data); 7725 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7726 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( 7727 byte[] data, 7728 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7729 throws com.google.protobuf.InvalidProtocolBufferException { 7730 return PARSER.parseFrom(data, extensionRegistry); 7731 } parseFrom(java.io.InputStream input)7732 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom(java.io.InputStream input) 7733 throws java.io.IOException { 7734 return PARSER.parseFrom(input); 7735 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7736 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( 7737 java.io.InputStream 
input, 7738 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7739 throws java.io.IOException { 7740 return PARSER.parseFrom(input, extensionRegistry); 7741 } parseDelimitedFrom(java.io.InputStream input)7742 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) 7743 throws java.io.IOException { 7744 return PARSER.parseDelimitedFrom(input); 7745 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7746 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseDelimitedFrom( 7747 java.io.InputStream input, 7748 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7749 throws java.io.IOException { 7750 return PARSER.parseDelimitedFrom(input, extensionRegistry); 7751 } parseFrom( com.google.protobuf.CodedInputStream input)7752 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( 7753 com.google.protobuf.CodedInputStream input) 7754 throws java.io.IOException { 7755 return PARSER.parseFrom(input); 7756 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7757 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parseFrom( 7758 com.google.protobuf.CodedInputStream input, 7759 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7760 throws java.io.IOException { 7761 return PARSER.parseFrom(input, extensionRegistry); 7762 } 7763 newBuilder()7764 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()7765 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype)7766 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition prototype) { 7767 return newBuilder().mergeFrom(prototype); 7768 } toBuilder()7769 public 
Builder toBuilder() { return newBuilder(this); } 7770 7771 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7772 protected Builder newBuilderForType( 7773 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7774 Builder builder = new Builder(parent); 7775 return builder; 7776 } 7777 /** 7778 * Protobuf type {@code Condition} 7779 * 7780 * <pre> 7781 ** 7782 * Condition to check if the value of a given cell (row, 7783 * family, qualifier) matches a value via a given comparator. 7784 * 7785 * Condition is used in check and mutate operations. 7786 * </pre> 7787 */ 7788 public static final class Builder extends 7789 com.google.protobuf.GeneratedMessage.Builder<Builder> 7790 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder { 7791 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7792 getDescriptor() { 7793 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; 7794 } 7795 7796 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7797 internalGetFieldAccessorTable() { 7798 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_fieldAccessorTable 7799 .ensureFieldAccessorsInitialized( 7800 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder.class); 7801 } 7802 7803 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder() Builder()7804 private Builder() { 7805 maybeForceBuilderInitialization(); 7806 } 7807 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7808 private Builder( 7809 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7810 super(parent); 7811 maybeForceBuilderInitialization(); 7812 } maybeForceBuilderInitialization()7813 private void 
maybeForceBuilderInitialization() { 7814 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 7815 getComparatorFieldBuilder(); 7816 } 7817 } create()7818 private static Builder create() { 7819 return new Builder(); 7820 } 7821 clear()7822 public Builder clear() { 7823 super.clear(); 7824 row_ = com.google.protobuf.ByteString.EMPTY; 7825 bitField0_ = (bitField0_ & ~0x00000001); 7826 family_ = com.google.protobuf.ByteString.EMPTY; 7827 bitField0_ = (bitField0_ & ~0x00000002); 7828 qualifier_ = com.google.protobuf.ByteString.EMPTY; 7829 bitField0_ = (bitField0_ & ~0x00000004); 7830 compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 7831 bitField0_ = (bitField0_ & ~0x00000008); 7832 if (comparatorBuilder_ == null) { 7833 comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 7834 } else { 7835 comparatorBuilder_.clear(); 7836 } 7837 bitField0_ = (bitField0_ & ~0x00000010); 7838 return this; 7839 } 7840 clone()7841 public Builder clone() { 7842 return create().mergeFrom(buildPartial()); 7843 } 7844 7845 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()7846 getDescriptorForType() { 7847 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Condition_descriptor; 7848 } 7849 getDefaultInstanceForType()7850 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getDefaultInstanceForType() { 7851 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); 7852 } 7853 build()7854 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition build() { 7855 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = buildPartial(); 7856 if (!result.isInitialized()) { 7857 throw newUninitializedMessageException(result); 7858 } 7859 return result; 7860 } 7861 buildPartial()7862 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition 
buildPartial() { 7863 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition(this); 7864 int from_bitField0_ = bitField0_; 7865 int to_bitField0_ = 0; 7866 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 7867 to_bitField0_ |= 0x00000001; 7868 } 7869 result.row_ = row_; 7870 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 7871 to_bitField0_ |= 0x00000002; 7872 } 7873 result.family_ = family_; 7874 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 7875 to_bitField0_ |= 0x00000004; 7876 } 7877 result.qualifier_ = qualifier_; 7878 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 7879 to_bitField0_ |= 0x00000008; 7880 } 7881 result.compareType_ = compareType_; 7882 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 7883 to_bitField0_ |= 0x00000010; 7884 } 7885 if (comparatorBuilder_ == null) { 7886 result.comparator_ = comparator_; 7887 } else { 7888 result.comparator_ = comparatorBuilder_.build(); 7889 } 7890 result.bitField0_ = to_bitField0_; 7891 onBuilt(); 7892 return result; 7893 } 7894 mergeFrom(com.google.protobuf.Message other)7895 public Builder mergeFrom(com.google.protobuf.Message other) { 7896 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) { 7897 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition)other); 7898 } else { 7899 super.mergeFrom(other); 7900 return this; 7901 } 7902 } 7903 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other)7904 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition other) { 7905 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) return this; 7906 if (other.hasRow()) { 7907 setRow(other.getRow()); 7908 } 7909 if (other.hasFamily()) { 7910 setFamily(other.getFamily()); 7911 } 7912 if (other.hasQualifier()) { 7913 
setQualifier(other.getQualifier()); 7914 } 7915 if (other.hasCompareType()) { 7916 setCompareType(other.getCompareType()); 7917 } 7918 if (other.hasComparator()) { 7919 mergeComparator(other.getComparator()); 7920 } 7921 this.mergeUnknownFields(other.getUnknownFields()); 7922 return this; 7923 } 7924 isInitialized()7925 public final boolean isInitialized() { 7926 if (!hasRow()) { 7927 7928 return false; 7929 } 7930 if (!hasFamily()) { 7931 7932 return false; 7933 } 7934 if (!hasQualifier()) { 7935 7936 return false; 7937 } 7938 if (!hasCompareType()) { 7939 7940 return false; 7941 } 7942 if (!hasComparator()) { 7943 7944 return false; 7945 } 7946 if (!getComparator().isInitialized()) { 7947 7948 return false; 7949 } 7950 return true; 7951 } 7952 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7953 public Builder mergeFrom( 7954 com.google.protobuf.CodedInputStream input, 7955 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7956 throws java.io.IOException { 7957 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition parsedMessage = null; 7958 try { 7959 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 7960 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7961 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition) e.getUnfinishedMessage(); 7962 throw e; 7963 } finally { 7964 if (parsedMessage != null) { 7965 mergeFrom(parsedMessage); 7966 } 7967 } 7968 return this; 7969 } 7970 private int bitField0_; 7971 7972 // required bytes row = 1; 7973 private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; 7974 /** 7975 * <code>required bytes row = 1;</code> 7976 */ hasRow()7977 public boolean hasRow() { 7978 return ((bitField0_ & 0x00000001) == 0x00000001); 7979 } 7980 /** 7981 * <code>required bytes row = 1;</code> 7982 */ getRow()7983 public com.google.protobuf.ByteString getRow() { 7984 return 
row_; 7985 } 7986 /** 7987 * <code>required bytes row = 1;</code> 7988 */ setRow(com.google.protobuf.ByteString value)7989 public Builder setRow(com.google.protobuf.ByteString value) { 7990 if (value == null) { 7991 throw new NullPointerException(); 7992 } 7993 bitField0_ |= 0x00000001; 7994 row_ = value; 7995 onChanged(); 7996 return this; 7997 } 7998 /** 7999 * <code>required bytes row = 1;</code> 8000 */ clearRow()8001 public Builder clearRow() { 8002 bitField0_ = (bitField0_ & ~0x00000001); 8003 row_ = getDefaultInstance().getRow(); 8004 onChanged(); 8005 return this; 8006 } 8007 8008 // required bytes family = 2; 8009 private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; 8010 /** 8011 * <code>required bytes family = 2;</code> 8012 */ hasFamily()8013 public boolean hasFamily() { 8014 return ((bitField0_ & 0x00000002) == 0x00000002); 8015 } 8016 /** 8017 * <code>required bytes family = 2;</code> 8018 */ getFamily()8019 public com.google.protobuf.ByteString getFamily() { 8020 return family_; 8021 } 8022 /** 8023 * <code>required bytes family = 2;</code> 8024 */ setFamily(com.google.protobuf.ByteString value)8025 public Builder setFamily(com.google.protobuf.ByteString value) { 8026 if (value == null) { 8027 throw new NullPointerException(); 8028 } 8029 bitField0_ |= 0x00000002; 8030 family_ = value; 8031 onChanged(); 8032 return this; 8033 } 8034 /** 8035 * <code>required bytes family = 2;</code> 8036 */ clearFamily()8037 public Builder clearFamily() { 8038 bitField0_ = (bitField0_ & ~0x00000002); 8039 family_ = getDefaultInstance().getFamily(); 8040 onChanged(); 8041 return this; 8042 } 8043 8044 // required bytes qualifier = 3; 8045 private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; 8046 /** 8047 * <code>required bytes qualifier = 3;</code> 8048 */ hasQualifier()8049 public boolean hasQualifier() { 8050 return ((bitField0_ & 0x00000004) == 0x00000004); 8051 } 8052 /** 8053 * <code>required 
bytes qualifier = 3;</code> 8054 */ getQualifier()8055 public com.google.protobuf.ByteString getQualifier() { 8056 return qualifier_; 8057 } 8058 /** 8059 * <code>required bytes qualifier = 3;</code> 8060 */ setQualifier(com.google.protobuf.ByteString value)8061 public Builder setQualifier(com.google.protobuf.ByteString value) { 8062 if (value == null) { 8063 throw new NullPointerException(); 8064 } 8065 bitField0_ |= 0x00000004; 8066 qualifier_ = value; 8067 onChanged(); 8068 return this; 8069 } 8070 /** 8071 * <code>required bytes qualifier = 3;</code> 8072 */ clearQualifier()8073 public Builder clearQualifier() { 8074 bitField0_ = (bitField0_ & ~0x00000004); 8075 qualifier_ = getDefaultInstance().getQualifier(); 8076 onChanged(); 8077 return this; 8078 } 8079 8080 // required .CompareType compare_type = 4; 8081 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 8082 /** 8083 * <code>required .CompareType compare_type = 4;</code> 8084 */ hasCompareType()8085 public boolean hasCompareType() { 8086 return ((bitField0_ & 0x00000008) == 0x00000008); 8087 } 8088 /** 8089 * <code>required .CompareType compare_type = 4;</code> 8090 */ getCompareType()8091 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareType() { 8092 return compareType_; 8093 } 8094 /** 8095 * <code>required .CompareType compare_type = 4;</code> 8096 */ setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value)8097 public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { 8098 if (value == null) { 8099 throw new NullPointerException(); 8100 } 8101 bitField0_ |= 0x00000008; 8102 compareType_ = value; 8103 onChanged(); 8104 return this; 8105 } 8106 /** 8107 * <code>required .CompareType compare_type = 4;</code> 8108 */ clearCompareType()8109 public Builder clearCompareType() { 8110 
bitField0_ = (bitField0_ & ~0x00000008); 8111 compareType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 8112 onChanged(); 8113 return this; 8114 } 8115 8116 // required .Comparator comparator = 5; 8117 private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 8118 private com.google.protobuf.SingleFieldBuilder< 8119 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; 8120 /** 8121 * <code>required .Comparator comparator = 5;</code> 8122 */ hasComparator()8123 public boolean hasComparator() { 8124 return ((bitField0_ & 0x00000010) == 0x00000010); 8125 } 8126 /** 8127 * <code>required .Comparator comparator = 5;</code> 8128 */ getComparator()8129 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { 8130 if (comparatorBuilder_ == null) { 8131 return comparator_; 8132 } else { 8133 return comparatorBuilder_.getMessage(); 8134 } 8135 } 8136 /** 8137 * <code>required .Comparator comparator = 5;</code> 8138 */ setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value)8139 public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { 8140 if (comparatorBuilder_ == null) { 8141 if (value == null) { 8142 throw new NullPointerException(); 8143 } 8144 comparator_ = value; 8145 onChanged(); 8146 } else { 8147 comparatorBuilder_.setMessage(value); 8148 } 8149 bitField0_ |= 0x00000010; 8150 return this; 8151 } 8152 /** 8153 * <code>required .Comparator comparator = 5;</code> 8154 */ setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue)8155 public 
Builder setComparator( 8156 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { 8157 if (comparatorBuilder_ == null) { 8158 comparator_ = builderForValue.build(); 8159 onChanged(); 8160 } else { 8161 comparatorBuilder_.setMessage(builderForValue.build()); 8162 } 8163 bitField0_ |= 0x00000010; 8164 return this; 8165 } 8166 /** 8167 * <code>required .Comparator comparator = 5;</code> 8168 */ mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value)8169 public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { 8170 if (comparatorBuilder_ == null) { 8171 if (((bitField0_ & 0x00000010) == 0x00000010) && 8172 comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { 8173 comparator_ = 8174 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); 8175 } else { 8176 comparator_ = value; 8177 } 8178 onChanged(); 8179 } else { 8180 comparatorBuilder_.mergeFrom(value); 8181 } 8182 bitField0_ |= 0x00000010; 8183 return this; 8184 } 8185 /** 8186 * <code>required .Comparator comparator = 5;</code> 8187 */ clearComparator()8188 public Builder clearComparator() { 8189 if (comparatorBuilder_ == null) { 8190 comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 8191 onChanged(); 8192 } else { 8193 comparatorBuilder_.clear(); 8194 } 8195 bitField0_ = (bitField0_ & ~0x00000010); 8196 return this; 8197 } 8198 /** 8199 * <code>required .Comparator comparator = 5;</code> 8200 */ getComparatorBuilder()8201 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { 8202 bitField0_ |= 0x00000010; 8203 onChanged(); 8204 return getComparatorFieldBuilder().getBuilder(); 8205 } 8206 /** 8207 * <code>required .Comparator comparator = 
5;</code> 8208 */ getComparatorOrBuilder()8209 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { 8210 if (comparatorBuilder_ != null) { 8211 return comparatorBuilder_.getMessageOrBuilder(); 8212 } else { 8213 return comparator_; 8214 } 8215 } 8216 /** 8217 * <code>required .Comparator comparator = 5;</code> 8218 */ 8219 private com.google.protobuf.SingleFieldBuilder< 8220 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder()8221 getComparatorFieldBuilder() { 8222 if (comparatorBuilder_ == null) { 8223 comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< 8224 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( 8225 comparator_, 8226 getParentForChildren(), 8227 isClean()); 8228 comparator_ = null; 8229 } 8230 return comparatorBuilder_; 8231 } 8232 8233 // @@protoc_insertion_point(builder_scope:Condition) 8234 } 8235 8236 static { 8237 defaultInstance = new Condition(true); defaultInstance.initFields()8238 defaultInstance.initFields(); 8239 } 8240 8241 // @@protoc_insertion_point(class_scope:Condition) 8242 } 8243 8244 public interface MutationProtoOrBuilder 8245 extends com.google.protobuf.MessageOrBuilder { 8246 8247 // optional bytes row = 1; 8248 /** 8249 * <code>optional bytes row = 1;</code> 8250 */ hasRow()8251 boolean hasRow(); 8252 /** 8253 * <code>optional bytes row = 1;</code> 8254 */ getRow()8255 com.google.protobuf.ByteString getRow(); 8256 8257 // optional .MutationProto.MutationType mutate_type = 2; 8258 /** 8259 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 8260 */ 
hasMutateType()8261 boolean hasMutateType(); 8262 /** 8263 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 8264 */ getMutateType()8265 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType(); 8266 8267 // repeated .MutationProto.ColumnValue column_value = 3; 8268 /** 8269 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 8270 */ 8271 java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList()8272 getColumnValueList(); 8273 /** 8274 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 8275 */ getColumnValue(int index)8276 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index); 8277 /** 8278 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 8279 */ getColumnValueCount()8280 int getColumnValueCount(); 8281 /** 8282 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 8283 */ 8284 java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueOrBuilderList()8285 getColumnValueOrBuilderList(); 8286 /** 8287 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 8288 */ getColumnValueOrBuilder( int index)8289 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( 8290 int index); 8291 8292 // optional uint64 timestamp = 4; 8293 /** 8294 * <code>optional uint64 timestamp = 4;</code> 8295 */ hasTimestamp()8296 boolean hasTimestamp(); 8297 /** 8298 * <code>optional uint64 timestamp = 4;</code> 8299 */ getTimestamp()8300 long getTimestamp(); 8301 8302 // repeated .NameBytesPair attribute = 5; 8303 /** 8304 * <code>repeated .NameBytesPair attribute = 5;</code> 8305 */ 8306 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList()8307 getAttributeList(); 8308 /** 8309 * <code>repeated .NameBytesPair attribute = 5;</code> 8310 */ getAttribute(int index)8311 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index); 8312 /** 8313 * <code>repeated .NameBytesPair attribute = 5;</code> 8314 */ getAttributeCount()8315 int getAttributeCount(); 8316 /** 8317 * <code>repeated .NameBytesPair attribute = 5;</code> 8318 */ 8319 java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()8320 getAttributeOrBuilderList(); 8321 /** 8322 * <code>repeated .NameBytesPair attribute = 5;</code> 8323 */ getAttributeOrBuilder( int index)8324 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 8325 int index); 8326 8327 // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT]; 8328 /** 8329 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 8330 */ hasDurability()8331 boolean hasDurability(); 8332 /** 8333 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 8334 */ getDurability()8335 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability(); 8336 8337 // optional .TimeRange time_range = 7; 8338 /** 8339 * <code>optional .TimeRange time_range = 7;</code> 8340 * 8341 * <pre> 8342 * For some mutations, a result may be returned, in which case, 8343 * time range can be specified for potential performance gain 8344 * </pre> 8345 */ hasTimeRange()8346 boolean hasTimeRange(); 8347 /** 8348 * <code>optional .TimeRange time_range = 7;</code> 8349 * 8350 * <pre> 8351 * For some mutations, a result may be returned, in which case, 8352 * time range can be specified for potential performance gain 8353 * </pre> 8354 */ getTimeRange()8355 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); 8356 /** 8357 * <code>optional .TimeRange time_range = 7;</code> 8358 * 8359 * <pre> 8360 * For some mutations, a result may be returned, in which case, 8361 * time range can be specified for potential performance gain 8362 * </pre> 8363 */ getTimeRangeOrBuilder()8364 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); 8365 8366 // optional int32 associated_cell_count = 8; 8367 /** 8368 * <code>optional int32 
associated_cell_count = 8;</code> 8369 * 8370 * <pre> 8371 * The below count is set when the associated cells are NOT 8372 * part of this protobuf message; they are passed alongside 8373 * and then this Message is a placeholder with metadata. The 8374 * count is needed to know how many to peel off the block of Cells as 8375 * ours. NOTE: This is different from the pb managed cell_count of the 8376 * 'cell' field above which is non-null when the cells are pb'd. 8377 * </pre> 8378 */ hasAssociatedCellCount()8379 boolean hasAssociatedCellCount(); 8380 /** 8381 * <code>optional int32 associated_cell_count = 8;</code> 8382 * 8383 * <pre> 8384 * The below count is set when the associated cells are NOT 8385 * part of this protobuf message; they are passed alongside 8386 * and then this Message is a placeholder with metadata. The 8387 * count is needed to know how many to peel off the block of Cells as 8388 * ours. NOTE: This is different from the pb managed cell_count of the 8389 * 'cell' field above which is non-null when the cells are pb'd. 8390 * </pre> 8391 */ getAssociatedCellCount()8392 int getAssociatedCellCount(); 8393 8394 // optional uint64 nonce = 9; 8395 /** 8396 * <code>optional uint64 nonce = 9;</code> 8397 */ hasNonce()8398 boolean hasNonce(); 8399 /** 8400 * <code>optional uint64 nonce = 9;</code> 8401 */ getNonce()8402 long getNonce(); 8403 } 8404 /** 8405 * Protobuf type {@code MutationProto} 8406 * 8407 * <pre> 8408 ** 8409 * A specific mutation inside a mutate request. 8410 * It can be an append, increment, put or delete based 8411 * on the mutation type. It can be fully filled in or 8412 * only metadata present because data is being carried 8413 * elsewhere outside of pb. 8414 * </pre> 8415 */ 8416 public static final class MutationProto extends 8417 com.google.protobuf.GeneratedMessage 8418 implements MutationProtoOrBuilder { 8419 // Use MutationProto.newBuilder() to construct. 
MutationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder)8420 private MutationProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8421 super(builder); 8422 this.unknownFields = builder.getUnknownFields(); 8423 } MutationProto(boolean noInit)8424 private MutationProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8425 8426 private static final MutationProto defaultInstance; getDefaultInstance()8427 public static MutationProto getDefaultInstance() { 8428 return defaultInstance; 8429 } 8430 getDefaultInstanceForType()8431 public MutationProto getDefaultInstanceForType() { 8432 return defaultInstance; 8433 } 8434 8435 private final com.google.protobuf.UnknownFieldSet unknownFields; 8436 @java.lang.Override 8437 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8438 getUnknownFields() { 8439 return this.unknownFields; 8440 } MutationProto( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8441 private MutationProto( 8442 com.google.protobuf.CodedInputStream input, 8443 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8444 throws com.google.protobuf.InvalidProtocolBufferException { 8445 initFields(); 8446 int mutable_bitField0_ = 0; 8447 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8448 com.google.protobuf.UnknownFieldSet.newBuilder(); 8449 try { 8450 boolean done = false; 8451 while (!done) { 8452 int tag = input.readTag(); 8453 switch (tag) { 8454 case 0: 8455 done = true; 8456 break; 8457 default: { 8458 if (!parseUnknownField(input, unknownFields, 8459 extensionRegistry, tag)) { 8460 done = true; 8461 } 8462 break; 8463 } 8464 case 10: { 8465 bitField0_ |= 0x00000001; 8466 row_ = input.readBytes(); 8467 break; 8468 } 8469 case 16: { 8470 int rawValue = input.readEnum(); 8471 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.valueOf(rawValue); 8472 if (value == null) { 8473 unknownFields.mergeVarintField(2, rawValue); 8474 } else { 8475 bitField0_ |= 0x00000002; 8476 mutateType_ = value; 8477 } 8478 break; 8479 } 8480 case 26: { 8481 if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 8482 columnValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(); 8483 mutable_bitField0_ |= 0x00000004; 8484 } 8485 columnValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.PARSER, extensionRegistry)); 8486 break; 8487 } 8488 case 32: { 8489 bitField0_ |= 0x00000004; 8490 timestamp_ = input.readUInt64(); 8491 break; 8492 } 8493 case 42: { 8494 if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { 8495 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(); 8496 mutable_bitField0_ |= 0x00000010; 8497 } 8498 attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry)); 8499 break; 8500 } 8501 case 48: { 8502 int rawValue = input.readEnum(); 8503 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.valueOf(rawValue); 8504 if (value == null) { 8505 unknownFields.mergeVarintField(6, rawValue); 8506 } else { 8507 bitField0_ |= 0x00000008; 8508 durability_ = value; 8509 } 8510 break; 8511 } 8512 case 58: { 8513 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; 8514 if (((bitField0_ & 0x00000010) == 0x00000010)) { 8515 subBuilder = timeRange_.toBuilder(); 8516 } 8517 timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); 8518 if (subBuilder != null) { 
8519 subBuilder.mergeFrom(timeRange_); 8520 timeRange_ = subBuilder.buildPartial(); 8521 } 8522 bitField0_ |= 0x00000010; 8523 break; 8524 } 8525 case 64: { 8526 bitField0_ |= 0x00000020; 8527 associatedCellCount_ = input.readInt32(); 8528 break; 8529 } 8530 case 72: { 8531 bitField0_ |= 0x00000040; 8532 nonce_ = input.readUInt64(); 8533 break; 8534 } 8535 } 8536 } 8537 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8538 throw e.setUnfinishedMessage(this); 8539 } catch (java.io.IOException e) { 8540 throw new com.google.protobuf.InvalidProtocolBufferException( 8541 e.getMessage()).setUnfinishedMessage(this); 8542 } finally { 8543 if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 8544 columnValue_ = java.util.Collections.unmodifiableList(columnValue_); 8545 } 8546 if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { 8547 attribute_ = java.util.Collections.unmodifiableList(attribute_); 8548 } 8549 this.unknownFields = unknownFields.build(); 8550 makeExtensionsImmutable(); 8551 } 8552 } 8553 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8554 getDescriptor() { 8555 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor; 8556 } 8557 8558 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8559 internalGetFieldAccessorTable() { 8560 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable 8561 .ensureFieldAccessorsInitialized( 8562 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class); 8563 } 8564 8565 public static com.google.protobuf.Parser<MutationProto> PARSER = 8566 new com.google.protobuf.AbstractParser<MutationProto>() { 8567 public MutationProto parsePartialFrom( 8568 com.google.protobuf.CodedInputStream input, 8569 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8570 throws com.google.protobuf.InvalidProtocolBufferException { 8571 return new MutationProto(input, extensionRegistry); 8572 } 8573 }; 8574 8575 @java.lang.Override getParserForType()8576 public com.google.protobuf.Parser<MutationProto> getParserForType() { 8577 return PARSER; 8578 } 8579 8580 /** 8581 * Protobuf enum {@code MutationProto.Durability} 8582 */ 8583 public enum Durability 8584 implements com.google.protobuf.ProtocolMessageEnum { 8585 /** 8586 * <code>USE_DEFAULT = 0;</code> 8587 */ 8588 USE_DEFAULT(0, 0), 8589 /** 8590 * <code>SKIP_WAL = 1;</code> 8591 */ 8592 SKIP_WAL(1, 1), 8593 /** 8594 * <code>ASYNC_WAL = 2;</code> 8595 */ 8596 ASYNC_WAL(2, 2), 8597 /** 8598 * <code>SYNC_WAL = 3;</code> 8599 */ 8600 SYNC_WAL(3, 3), 8601 /** 8602 * <code>FSYNC_WAL = 4;</code> 8603 */ 8604 FSYNC_WAL(4, 4), 8605 ; 8606 8607 /** 8608 * <code>USE_DEFAULT = 0;</code> 8609 */ 8610 public static final int USE_DEFAULT_VALUE = 0; 8611 /** 8612 * <code>SKIP_WAL = 1;</code> 8613 */ 8614 public static final int SKIP_WAL_VALUE = 1; 8615 /** 8616 * <code>ASYNC_WAL = 2;</code> 8617 */ 8618 public static final int ASYNC_WAL_VALUE = 2; 8619 /** 8620 * <code>SYNC_WAL = 3;</code> 8621 */ 8622 public static final int SYNC_WAL_VALUE = 3; 8623 /** 8624 * <code>FSYNC_WAL = 4;</code> 8625 */ 8626 public static final int FSYNC_WAL_VALUE = 4; 8627 8628 getNumber()8629 public final int getNumber() { return value; } 8630 valueOf(int value)8631 public static Durability valueOf(int value) { 8632 switch (value) { 8633 case 0: return USE_DEFAULT; 8634 case 1: return SKIP_WAL; 8635 case 2: return ASYNC_WAL; 8636 case 3: return SYNC_WAL; 8637 case 4: return FSYNC_WAL; 8638 default: return null; 8639 } 8640 } 8641 8642 public static com.google.protobuf.Internal.EnumLiteMap<Durability> internalGetValueMap()8643 internalGetValueMap() { 8644 return internalValueMap; 8645 } 8646 private static 
com.google.protobuf.Internal.EnumLiteMap<Durability> 8647 internalValueMap = 8648 new com.google.protobuf.Internal.EnumLiteMap<Durability>() { 8649 public Durability findValueByNumber(int number) { 8650 return Durability.valueOf(number); 8651 } 8652 }; 8653 8654 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()8655 getValueDescriptor() { 8656 return getDescriptor().getValues().get(index); 8657 } 8658 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()8659 getDescriptorForType() { 8660 return getDescriptor(); 8661 } 8662 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()8663 getDescriptor() { 8664 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(0); 8665 } 8666 8667 private static final Durability[] VALUES = values(); 8668 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)8669 public static Durability valueOf( 8670 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 8671 if (desc.getType() != getDescriptor()) { 8672 throw new java.lang.IllegalArgumentException( 8673 "EnumValueDescriptor is not for this type."); 8674 } 8675 return VALUES[desc.getIndex()]; 8676 } 8677 8678 private final int index; 8679 private final int value; 8680 Durability(int index, int value)8681 private Durability(int index, int value) { 8682 this.index = index; 8683 this.value = value; 8684 } 8685 8686 // @@protoc_insertion_point(enum_scope:MutationProto.Durability) 8687 } 8688 8689 /** 8690 * Protobuf enum {@code MutationProto.MutationType} 8691 */ 8692 public enum MutationType 8693 implements com.google.protobuf.ProtocolMessageEnum { 8694 /** 8695 * <code>APPEND = 0;</code> 8696 */ 8697 APPEND(0, 0), 8698 /** 8699 * <code>INCREMENT = 1;</code> 8700 */ 8701 INCREMENT(1, 1), 8702 /** 8703 * <code>PUT = 2;</code> 8704 */ 8705 PUT(2, 2), 8706 /** 8707 * <code>DELETE = 3;</code> 8708 */ 8709 DELETE(3, 3), 8710 ; 
8711 8712 /** 8713 * <code>APPEND = 0;</code> 8714 */ 8715 public static final int APPEND_VALUE = 0; 8716 /** 8717 * <code>INCREMENT = 1;</code> 8718 */ 8719 public static final int INCREMENT_VALUE = 1; 8720 /** 8721 * <code>PUT = 2;</code> 8722 */ 8723 public static final int PUT_VALUE = 2; 8724 /** 8725 * <code>DELETE = 3;</code> 8726 */ 8727 public static final int DELETE_VALUE = 3; 8728 8729 getNumber()8730 public final int getNumber() { return value; } 8731 valueOf(int value)8732 public static MutationType valueOf(int value) { 8733 switch (value) { 8734 case 0: return APPEND; 8735 case 1: return INCREMENT; 8736 case 2: return PUT; 8737 case 3: return DELETE; 8738 default: return null; 8739 } 8740 } 8741 8742 public static com.google.protobuf.Internal.EnumLiteMap<MutationType> internalGetValueMap()8743 internalGetValueMap() { 8744 return internalValueMap; 8745 } 8746 private static com.google.protobuf.Internal.EnumLiteMap<MutationType> 8747 internalValueMap = 8748 new com.google.protobuf.Internal.EnumLiteMap<MutationType>() { 8749 public MutationType findValueByNumber(int number) { 8750 return MutationType.valueOf(number); 8751 } 8752 }; 8753 8754 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()8755 getValueDescriptor() { 8756 return getDescriptor().getValues().get(index); 8757 } 8758 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()8759 getDescriptorForType() { 8760 return getDescriptor(); 8761 } 8762 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()8763 getDescriptor() { 8764 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(1); 8765 } 8766 8767 private static final MutationType[] VALUES = values(); 8768 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)8769 public static MutationType valueOf( 8770 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 8771 if (desc.getType() 
!= getDescriptor()) { 8772 throw new java.lang.IllegalArgumentException( 8773 "EnumValueDescriptor is not for this type."); 8774 } 8775 return VALUES[desc.getIndex()]; 8776 } 8777 8778 private final int index; 8779 private final int value; 8780 MutationType(int index, int value)8781 private MutationType(int index, int value) { 8782 this.index = index; 8783 this.value = value; 8784 } 8785 8786 // @@protoc_insertion_point(enum_scope:MutationProto.MutationType) 8787 } 8788 8789 /** 8790 * Protobuf enum {@code MutationProto.DeleteType} 8791 */ 8792 public enum DeleteType 8793 implements com.google.protobuf.ProtocolMessageEnum { 8794 /** 8795 * <code>DELETE_ONE_VERSION = 0;</code> 8796 */ 8797 DELETE_ONE_VERSION(0, 0), 8798 /** 8799 * <code>DELETE_MULTIPLE_VERSIONS = 1;</code> 8800 */ 8801 DELETE_MULTIPLE_VERSIONS(1, 1), 8802 /** 8803 * <code>DELETE_FAMILY = 2;</code> 8804 */ 8805 DELETE_FAMILY(2, 2), 8806 /** 8807 * <code>DELETE_FAMILY_VERSION = 3;</code> 8808 */ 8809 DELETE_FAMILY_VERSION(3, 3), 8810 ; 8811 8812 /** 8813 * <code>DELETE_ONE_VERSION = 0;</code> 8814 */ 8815 public static final int DELETE_ONE_VERSION_VALUE = 0; 8816 /** 8817 * <code>DELETE_MULTIPLE_VERSIONS = 1;</code> 8818 */ 8819 public static final int DELETE_MULTIPLE_VERSIONS_VALUE = 1; 8820 /** 8821 * <code>DELETE_FAMILY = 2;</code> 8822 */ 8823 public static final int DELETE_FAMILY_VALUE = 2; 8824 /** 8825 * <code>DELETE_FAMILY_VERSION = 3;</code> 8826 */ 8827 public static final int DELETE_FAMILY_VERSION_VALUE = 3; 8828 8829 getNumber()8830 public final int getNumber() { return value; } 8831 valueOf(int value)8832 public static DeleteType valueOf(int value) { 8833 switch (value) { 8834 case 0: return DELETE_ONE_VERSION; 8835 case 1: return DELETE_MULTIPLE_VERSIONS; 8836 case 2: return DELETE_FAMILY; 8837 case 3: return DELETE_FAMILY_VERSION; 8838 default: return null; 8839 } 8840 } 8841 8842 public static com.google.protobuf.Internal.EnumLiteMap<DeleteType> internalGetValueMap()8843 
internalGetValueMap() { 8844 return internalValueMap; 8845 } 8846 private static com.google.protobuf.Internal.EnumLiteMap<DeleteType> 8847 internalValueMap = 8848 new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() { 8849 public DeleteType findValueByNumber(int number) { 8850 return DeleteType.valueOf(number); 8851 } 8852 }; 8853 8854 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()8855 getValueDescriptor() { 8856 return getDescriptor().getValues().get(index); 8857 } 8858 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()8859 getDescriptorForType() { 8860 return getDescriptor(); 8861 } 8862 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()8863 getDescriptor() { 8864 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDescriptor().getEnumTypes().get(2); 8865 } 8866 8867 private static final DeleteType[] VALUES = values(); 8868 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)8869 public static DeleteType valueOf( 8870 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 8871 if (desc.getType() != getDescriptor()) { 8872 throw new java.lang.IllegalArgumentException( 8873 "EnumValueDescriptor is not for this type."); 8874 } 8875 return VALUES[desc.getIndex()]; 8876 } 8877 8878 private final int index; 8879 private final int value; 8880 DeleteType(int index, int value)8881 private DeleteType(int index, int value) { 8882 this.index = index; 8883 this.value = value; 8884 } 8885 8886 // @@protoc_insertion_point(enum_scope:MutationProto.DeleteType) 8887 } 8888 8889 public interface ColumnValueOrBuilder 8890 extends com.google.protobuf.MessageOrBuilder { 8891 8892 // required bytes family = 1; 8893 /** 8894 * <code>required bytes family = 1;</code> 8895 */ hasFamily()8896 boolean hasFamily(); 8897 /** 8898 * <code>required bytes family = 1;</code> 8899 */ getFamily()8900 com.google.protobuf.ByteString getFamily(); 
8901 8902 // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2; 8903 /** 8904 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 8905 */ 8906 java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList()8907 getQualifierValueList(); 8908 /** 8909 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 8910 */ getQualifierValue(int index)8911 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index); 8912 /** 8913 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 8914 */ getQualifierValueCount()8915 int getQualifierValueCount(); 8916 /** 8917 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 8918 */ 8919 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueOrBuilderList()8920 getQualifierValueOrBuilderList(); 8921 /** 8922 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 8923 */ getQualifierValueOrBuilder( int index)8924 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( 8925 int index); 8926 } 8927 /** 8928 * Protobuf type {@code MutationProto.ColumnValue} 8929 */ 8930 public static final class ColumnValue extends 8931 com.google.protobuf.GeneratedMessage 8932 implements ColumnValueOrBuilder { 8933 // Use ColumnValue.newBuilder() to construct. 
ColumnValue(com.google.protobuf.GeneratedMessage.Builder<?> builder)8934 private ColumnValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8935 super(builder); 8936 this.unknownFields = builder.getUnknownFields(); 8937 } ColumnValue(boolean noInit)8938 private ColumnValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8939 8940 private static final ColumnValue defaultInstance; getDefaultInstance()8941 public static ColumnValue getDefaultInstance() { 8942 return defaultInstance; 8943 } 8944 getDefaultInstanceForType()8945 public ColumnValue getDefaultInstanceForType() { 8946 return defaultInstance; 8947 } 8948 8949 private final com.google.protobuf.UnknownFieldSet unknownFields; 8950 @java.lang.Override 8951 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8952 getUnknownFields() { 8953 return this.unknownFields; 8954 } ColumnValue( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8955 private ColumnValue( 8956 com.google.protobuf.CodedInputStream input, 8957 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8958 throws com.google.protobuf.InvalidProtocolBufferException { 8959 initFields(); 8960 int mutable_bitField0_ = 0; 8961 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8962 com.google.protobuf.UnknownFieldSet.newBuilder(); 8963 try { 8964 boolean done = false; 8965 while (!done) { 8966 int tag = input.readTag(); 8967 switch (tag) { 8968 case 0: 8969 done = true; 8970 break; 8971 default: { 8972 if (!parseUnknownField(input, unknownFields, 8973 extensionRegistry, tag)) { 8974 done = true; 8975 } 8976 break; 8977 } 8978 case 10: { 8979 bitField0_ |= 0x00000001; 8980 family_ = input.readBytes(); 8981 break; 8982 } 8983 case 18: { 8984 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 8985 qualifierValue_ = new 
java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(); 8986 mutable_bitField0_ |= 0x00000002; 8987 } 8988 qualifierValue_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.PARSER, extensionRegistry)); 8989 break; 8990 } 8991 } 8992 } 8993 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8994 throw e.setUnfinishedMessage(this); 8995 } catch (java.io.IOException e) { 8996 throw new com.google.protobuf.InvalidProtocolBufferException( 8997 e.getMessage()).setUnfinishedMessage(this); 8998 } finally { 8999 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 9000 qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); 9001 } 9002 this.unknownFields = unknownFields.build(); 9003 makeExtensionsImmutable(); 9004 } 9005 } 9006 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9007 getDescriptor() { 9008 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor; 9009 } 9010 9011 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9012 internalGetFieldAccessorTable() { 9013 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable 9014 .ensureFieldAccessorsInitialized( 9015 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); 9016 } 9017 9018 public static com.google.protobuf.Parser<ColumnValue> PARSER = 9019 new com.google.protobuf.AbstractParser<ColumnValue>() { 9020 public ColumnValue parsePartialFrom( 9021 com.google.protobuf.CodedInputStream input, 9022 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9023 throws com.google.protobuf.InvalidProtocolBufferException { 
9024 return new ColumnValue(input, extensionRegistry); 9025 } 9026 }; 9027 9028 @java.lang.Override getParserForType()9029 public com.google.protobuf.Parser<ColumnValue> getParserForType() { 9030 return PARSER; 9031 } 9032 9033 public interface QualifierValueOrBuilder 9034 extends com.google.protobuf.MessageOrBuilder { 9035 9036 // optional bytes qualifier = 1; 9037 /** 9038 * <code>optional bytes qualifier = 1;</code> 9039 */ hasQualifier()9040 boolean hasQualifier(); 9041 /** 9042 * <code>optional bytes qualifier = 1;</code> 9043 */ getQualifier()9044 com.google.protobuf.ByteString getQualifier(); 9045 9046 // optional bytes value = 2; 9047 /** 9048 * <code>optional bytes value = 2;</code> 9049 */ hasValue()9050 boolean hasValue(); 9051 /** 9052 * <code>optional bytes value = 2;</code> 9053 */ getValue()9054 com.google.protobuf.ByteString getValue(); 9055 9056 // optional uint64 timestamp = 3; 9057 /** 9058 * <code>optional uint64 timestamp = 3;</code> 9059 */ hasTimestamp()9060 boolean hasTimestamp(); 9061 /** 9062 * <code>optional uint64 timestamp = 3;</code> 9063 */ getTimestamp()9064 long getTimestamp(); 9065 9066 // optional .MutationProto.DeleteType delete_type = 4; 9067 /** 9068 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9069 */ hasDeleteType()9070 boolean hasDeleteType(); 9071 /** 9072 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9073 */ getDeleteType()9074 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType(); 9075 9076 // optional bytes tags = 5; 9077 /** 9078 * <code>optional bytes tags = 5;</code> 9079 */ hasTags()9080 boolean hasTags(); 9081 /** 9082 * <code>optional bytes tags = 5;</code> 9083 */ getTags()9084 com.google.protobuf.ByteString getTags(); 9085 } 9086 /** 9087 * Protobuf type {@code MutationProto.ColumnValue.QualifierValue} 9088 */ 9089 public static final class QualifierValue extends 9090 com.google.protobuf.GeneratedMessage 9091 implements 
QualifierValueOrBuilder { 9092 // Use QualifierValue.newBuilder() to construct. QualifierValue(com.google.protobuf.GeneratedMessage.Builder<?> builder)9093 private QualifierValue(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 9094 super(builder); 9095 this.unknownFields = builder.getUnknownFields(); 9096 } QualifierValue(boolean noInit)9097 private QualifierValue(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 9098 9099 private static final QualifierValue defaultInstance; getDefaultInstance()9100 public static QualifierValue getDefaultInstance() { 9101 return defaultInstance; 9102 } 9103 getDefaultInstanceForType()9104 public QualifierValue getDefaultInstanceForType() { 9105 return defaultInstance; 9106 } 9107 9108 private final com.google.protobuf.UnknownFieldSet unknownFields; 9109 @java.lang.Override 9110 public final com.google.protobuf.UnknownFieldSet getUnknownFields()9111 getUnknownFields() { 9112 return this.unknownFields; 9113 } QualifierValue( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9114 private QualifierValue( 9115 com.google.protobuf.CodedInputStream input, 9116 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9117 throws com.google.protobuf.InvalidProtocolBufferException { 9118 initFields(); 9119 int mutable_bitField0_ = 0; 9120 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 9121 com.google.protobuf.UnknownFieldSet.newBuilder(); 9122 try { 9123 boolean done = false; 9124 while (!done) { 9125 int tag = input.readTag(); 9126 switch (tag) { 9127 case 0: 9128 done = true; 9129 break; 9130 default: { 9131 if (!parseUnknownField(input, unknownFields, 9132 extensionRegistry, tag)) { 9133 done = true; 9134 } 9135 break; 9136 } 9137 case 10: { 9138 bitField0_ |= 0x00000001; 9139 qualifier_ = input.readBytes(); 9140 break; 9141 } 9142 case 18: { 9143 bitField0_ |= 0x00000002; 9144 value_ = input.readBytes(); 9145 
break; 9146 } 9147 case 24: { 9148 bitField0_ |= 0x00000004; 9149 timestamp_ = input.readUInt64(); 9150 break; 9151 } 9152 case 32: { 9153 int rawValue = input.readEnum(); 9154 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.valueOf(rawValue); 9155 if (value == null) { 9156 unknownFields.mergeVarintField(4, rawValue); 9157 } else { 9158 bitField0_ |= 0x00000008; 9159 deleteType_ = value; 9160 } 9161 break; 9162 } 9163 case 42: { 9164 bitField0_ |= 0x00000010; 9165 tags_ = input.readBytes(); 9166 break; 9167 } 9168 } 9169 } 9170 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9171 throw e.setUnfinishedMessage(this); 9172 } catch (java.io.IOException e) { 9173 throw new com.google.protobuf.InvalidProtocolBufferException( 9174 e.getMessage()).setUnfinishedMessage(this); 9175 } finally { 9176 this.unknownFields = unknownFields.build(); 9177 makeExtensionsImmutable(); 9178 } 9179 } 9180 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9181 getDescriptor() { 9182 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor; 9183 } 9184 9185 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9186 internalGetFieldAccessorTable() { 9187 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable 9188 .ensureFieldAccessorsInitialized( 9189 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); 9190 } 9191 9192 public static com.google.protobuf.Parser<QualifierValue> PARSER = 9193 new com.google.protobuf.AbstractParser<QualifierValue>() { 9194 public 
QualifierValue parsePartialFrom( 9195 com.google.protobuf.CodedInputStream input, 9196 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9197 throws com.google.protobuf.InvalidProtocolBufferException { 9198 return new QualifierValue(input, extensionRegistry); 9199 } 9200 }; 9201 9202 @java.lang.Override getParserForType()9203 public com.google.protobuf.Parser<QualifierValue> getParserForType() { 9204 return PARSER; 9205 } 9206 9207 private int bitField0_; 9208 // optional bytes qualifier = 1; 9209 public static final int QUALIFIER_FIELD_NUMBER = 1; 9210 private com.google.protobuf.ByteString qualifier_; 9211 /** 9212 * <code>optional bytes qualifier = 1;</code> 9213 */ hasQualifier()9214 public boolean hasQualifier() { 9215 return ((bitField0_ & 0x00000001) == 0x00000001); 9216 } 9217 /** 9218 * <code>optional bytes qualifier = 1;</code> 9219 */ getQualifier()9220 public com.google.protobuf.ByteString getQualifier() { 9221 return qualifier_; 9222 } 9223 9224 // optional bytes value = 2; 9225 public static final int VALUE_FIELD_NUMBER = 2; 9226 private com.google.protobuf.ByteString value_; 9227 /** 9228 * <code>optional bytes value = 2;</code> 9229 */ hasValue()9230 public boolean hasValue() { 9231 return ((bitField0_ & 0x00000002) == 0x00000002); 9232 } 9233 /** 9234 * <code>optional bytes value = 2;</code> 9235 */ getValue()9236 public com.google.protobuf.ByteString getValue() { 9237 return value_; 9238 } 9239 9240 // optional uint64 timestamp = 3; 9241 public static final int TIMESTAMP_FIELD_NUMBER = 3; 9242 private long timestamp_; 9243 /** 9244 * <code>optional uint64 timestamp = 3;</code> 9245 */ hasTimestamp()9246 public boolean hasTimestamp() { 9247 return ((bitField0_ & 0x00000004) == 0x00000004); 9248 } 9249 /** 9250 * <code>optional uint64 timestamp = 3;</code> 9251 */ getTimestamp()9252 public long getTimestamp() { 9253 return timestamp_; 9254 } 9255 9256 // optional .MutationProto.DeleteType delete_type = 4; 9257 public static final int 
DELETE_TYPE_FIELD_NUMBER = 4; 9258 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_; 9259 /** 9260 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9261 */ hasDeleteType()9262 public boolean hasDeleteType() { 9263 return ((bitField0_ & 0x00000008) == 0x00000008); 9264 } 9265 /** 9266 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9267 */ getDeleteType()9268 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { 9269 return deleteType_; 9270 } 9271 9272 // optional bytes tags = 5; 9273 public static final int TAGS_FIELD_NUMBER = 5; 9274 private com.google.protobuf.ByteString tags_; 9275 /** 9276 * <code>optional bytes tags = 5;</code> 9277 */ hasTags()9278 public boolean hasTags() { 9279 return ((bitField0_ & 0x00000010) == 0x00000010); 9280 } 9281 /** 9282 * <code>optional bytes tags = 5;</code> 9283 */ getTags()9284 public com.google.protobuf.ByteString getTags() { 9285 return tags_; 9286 } 9287 initFields()9288 private void initFields() { 9289 qualifier_ = com.google.protobuf.ByteString.EMPTY; 9290 value_ = com.google.protobuf.ByteString.EMPTY; 9291 timestamp_ = 0L; 9292 deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; 9293 tags_ = com.google.protobuf.ByteString.EMPTY; 9294 } 9295 private byte memoizedIsInitialized = -1; isInitialized()9296 public final boolean isInitialized() { 9297 byte isInitialized = memoizedIsInitialized; 9298 if (isInitialized != -1) return isInitialized == 1; 9299 9300 memoizedIsInitialized = 1; 9301 return true; 9302 } 9303 writeTo(com.google.protobuf.CodedOutputStream output)9304 public void writeTo(com.google.protobuf.CodedOutputStream output) 9305 throws java.io.IOException { 9306 getSerializedSize(); 9307 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9308 output.writeBytes(1, qualifier_); 9309 } 9310 if (((bitField0_ & 
0x00000002) == 0x00000002)) { 9311 output.writeBytes(2, value_); 9312 } 9313 if (((bitField0_ & 0x00000004) == 0x00000004)) { 9314 output.writeUInt64(3, timestamp_); 9315 } 9316 if (((bitField0_ & 0x00000008) == 0x00000008)) { 9317 output.writeEnum(4, deleteType_.getNumber()); 9318 } 9319 if (((bitField0_ & 0x00000010) == 0x00000010)) { 9320 output.writeBytes(5, tags_); 9321 } 9322 getUnknownFields().writeTo(output); 9323 } 9324 9325 private int memoizedSerializedSize = -1; getSerializedSize()9326 public int getSerializedSize() { 9327 int size = memoizedSerializedSize; 9328 if (size != -1) return size; 9329 9330 size = 0; 9331 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9332 size += com.google.protobuf.CodedOutputStream 9333 .computeBytesSize(1, qualifier_); 9334 } 9335 if (((bitField0_ & 0x00000002) == 0x00000002)) { 9336 size += com.google.protobuf.CodedOutputStream 9337 .computeBytesSize(2, value_); 9338 } 9339 if (((bitField0_ & 0x00000004) == 0x00000004)) { 9340 size += com.google.protobuf.CodedOutputStream 9341 .computeUInt64Size(3, timestamp_); 9342 } 9343 if (((bitField0_ & 0x00000008) == 0x00000008)) { 9344 size += com.google.protobuf.CodedOutputStream 9345 .computeEnumSize(4, deleteType_.getNumber()); 9346 } 9347 if (((bitField0_ & 0x00000010) == 0x00000010)) { 9348 size += com.google.protobuf.CodedOutputStream 9349 .computeBytesSize(5, tags_); 9350 } 9351 size += getUnknownFields().getSerializedSize(); 9352 memoizedSerializedSize = size; 9353 return size; 9354 } 9355 9356 private static final long serialVersionUID = 0L; 9357 @java.lang.Override writeReplace()9358 protected java.lang.Object writeReplace() 9359 throws java.io.ObjectStreamException { 9360 return super.writeReplace(); 9361 } 9362 9363 @java.lang.Override equals(final java.lang.Object obj)9364 public boolean equals(final java.lang.Object obj) { 9365 if (obj == this) { 9366 return true; 9367 } 9368 if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)) { 9369 return super.equals(obj); 9370 } 9371 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) obj; 9372 9373 boolean result = true; 9374 result = result && (hasQualifier() == other.hasQualifier()); 9375 if (hasQualifier()) { 9376 result = result && getQualifier() 9377 .equals(other.getQualifier()); 9378 } 9379 result = result && (hasValue() == other.hasValue()); 9380 if (hasValue()) { 9381 result = result && getValue() 9382 .equals(other.getValue()); 9383 } 9384 result = result && (hasTimestamp() == other.hasTimestamp()); 9385 if (hasTimestamp()) { 9386 result = result && (getTimestamp() 9387 == other.getTimestamp()); 9388 } 9389 result = result && (hasDeleteType() == other.hasDeleteType()); 9390 if (hasDeleteType()) { 9391 result = result && 9392 (getDeleteType() == other.getDeleteType()); 9393 } 9394 result = result && (hasTags() == other.hasTags()); 9395 if (hasTags()) { 9396 result = result && getTags() 9397 .equals(other.getTags()); 9398 } 9399 result = result && 9400 getUnknownFields().equals(other.getUnknownFields()); 9401 return result; 9402 } 9403 9404 private int memoizedHashCode = 0; 9405 @java.lang.Override hashCode()9406 public int hashCode() { 9407 if (memoizedHashCode != 0) { 9408 return memoizedHashCode; 9409 } 9410 int hash = 41; 9411 hash = (19 * hash) + getDescriptorForType().hashCode(); 9412 if (hasQualifier()) { 9413 hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; 9414 hash = (53 * hash) + getQualifier().hashCode(); 9415 } 9416 if (hasValue()) { 9417 hash = (37 * hash) + VALUE_FIELD_NUMBER; 9418 hash = (53 * hash) + getValue().hashCode(); 9419 } 9420 if (hasTimestamp()) { 9421 hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; 9422 hash = (53 * hash) + hashLong(getTimestamp()); 9423 } 9424 if 
(hasDeleteType()) { 9425 hash = (37 * hash) + DELETE_TYPE_FIELD_NUMBER; 9426 hash = (53 * hash) + hashEnum(getDeleteType()); 9427 } 9428 if (hasTags()) { 9429 hash = (37 * hash) + TAGS_FIELD_NUMBER; 9430 hash = (53 * hash) + getTags().hashCode(); 9431 } 9432 hash = (29 * hash) + getUnknownFields().hashCode(); 9433 memoizedHashCode = hash; 9434 return hash; 9435 } 9436 parseFrom( com.google.protobuf.ByteString data)9437 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( 9438 com.google.protobuf.ByteString data) 9439 throws com.google.protobuf.InvalidProtocolBufferException { 9440 return PARSER.parseFrom(data); 9441 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9442 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( 9443 com.google.protobuf.ByteString data, 9444 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9445 throws com.google.protobuf.InvalidProtocolBufferException { 9446 return PARSER.parseFrom(data, extensionRegistry); 9447 } parseFrom(byte[] data)9448 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(byte[] data) 9449 throws com.google.protobuf.InvalidProtocolBufferException { 9450 return PARSER.parseFrom(data); 9451 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9452 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( 9453 byte[] data, 9454 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9455 throws com.google.protobuf.InvalidProtocolBufferException { 9456 return PARSER.parseFrom(data, extensionRegistry); 9457 } parseFrom(java.io.InputStream input)9458 public static 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) 9459 throws java.io.IOException { 9460 return PARSER.parseFrom(input); 9461 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9462 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( 9463 java.io.InputStream input, 9464 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9465 throws java.io.IOException { 9466 return PARSER.parseFrom(input, extensionRegistry); 9467 } parseDelimitedFrom(java.io.InputStream input)9468 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) 9469 throws java.io.IOException { 9470 return PARSER.parseDelimitedFrom(input); 9471 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9472 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseDelimitedFrom( 9473 java.io.InputStream input, 9474 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9475 throws java.io.IOException { 9476 return PARSER.parseDelimitedFrom(input, extensionRegistry); 9477 } parseFrom( com.google.protobuf.CodedInputStream input)9478 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( 9479 com.google.protobuf.CodedInputStream input) 9480 throws java.io.IOException { 9481 return PARSER.parseFrom(input); 9482 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9483 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parseFrom( 9484 com.google.protobuf.CodedInputStream input, 9485 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9486 throws java.io.IOException { 9487 return PARSER.parseFrom(input, extensionRegistry); 9488 } 9489 newBuilder()9490 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()9491 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype)9492 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue prototype) { 9493 return newBuilder().mergeFrom(prototype); 9494 } toBuilder()9495 public Builder toBuilder() { return newBuilder(this); } 9496 9497 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9498 protected Builder newBuilderForType( 9499 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9500 Builder builder = new Builder(parent); 9501 return builder; 9502 } 9503 /** 9504 * Protobuf type {@code MutationProto.ColumnValue.QualifierValue} 9505 */ 9506 public static final class Builder extends 9507 com.google.protobuf.GeneratedMessage.Builder<Builder> 9508 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder { 9509 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9510 getDescriptor() { 9511 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor; 9512 } 9513 9514 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9515 internalGetFieldAccessorTable() { 9516 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_fieldAccessorTable 9517 .ensureFieldAccessorsInitialized( 9518 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder.class); 9519 } 9520 9521 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.newBuilder() Builder()9522 private Builder() { 9523 maybeForceBuilderInitialization(); 9524 } 9525 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)9526 private Builder( 9527 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9528 super(parent); 9529 maybeForceBuilderInitialization(); 9530 } maybeForceBuilderInitialization()9531 private void maybeForceBuilderInitialization() { 9532 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 9533 } 9534 } create()9535 private static Builder create() { 9536 return new Builder(); 9537 } 9538 clear()9539 public Builder clear() { 9540 super.clear(); 9541 qualifier_ = com.google.protobuf.ByteString.EMPTY; 9542 bitField0_ = (bitField0_ & ~0x00000001); 9543 value_ = com.google.protobuf.ByteString.EMPTY; 9544 bitField0_ = (bitField0_ & ~0x00000002); 9545 timestamp_ = 0L; 9546 bitField0_ = (bitField0_ & ~0x00000004); 9547 deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; 9548 bitField0_ = (bitField0_ & ~0x00000008); 9549 tags_ = com.google.protobuf.ByteString.EMPTY; 9550 bitField0_ = (bitField0_ & ~0x00000010); 9551 return this; 9552 } 9553 clone()9554 public Builder clone() { 9555 return create().mergeFrom(buildPartial()); 9556 } 9557 9558 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()9559 getDescriptorForType() { 9560 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_QualifierValue_descriptor; 9561 } 9562 getDefaultInstanceForType()9563 public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getDefaultInstanceForType() { 9564 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance(); 9565 } 9566 build()9567 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue build() { 9568 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = buildPartial(); 9569 if (!result.isInitialized()) { 9570 throw newUninitializedMessageException(result); 9571 } 9572 return result; 9573 } 9574 buildPartial()9575 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue buildPartial() { 9576 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue(this); 9577 int from_bitField0_ = bitField0_; 9578 int to_bitField0_ = 0; 9579 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 9580 to_bitField0_ |= 0x00000001; 9581 } 9582 result.qualifier_ = qualifier_; 9583 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 9584 to_bitField0_ |= 0x00000002; 9585 } 9586 result.value_ = value_; 9587 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 9588 to_bitField0_ |= 0x00000004; 9589 } 9590 result.timestamp_ = timestamp_; 9591 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 9592 to_bitField0_ |= 0x00000008; 9593 } 9594 result.deleteType_ = deleteType_; 9595 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 9596 to_bitField0_ |= 0x00000010; 9597 } 9598 result.tags_ = tags_; 9599 result.bitField0_ = to_bitField0_; 9600 onBuilt(); 9601 return result; 9602 } 9603 mergeFrom(com.google.protobuf.Message other)9604 public Builder mergeFrom(com.google.protobuf.Message other) { 9605 if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) { 9606 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue)other); 9607 } else { 9608 super.mergeFrom(other); 9609 return this; 9610 } 9611 } 9612 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other)9613 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue other) { 9614 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()) return this; 9615 if (other.hasQualifier()) { 9616 setQualifier(other.getQualifier()); 9617 } 9618 if (other.hasValue()) { 9619 setValue(other.getValue()); 9620 } 9621 if (other.hasTimestamp()) { 9622 setTimestamp(other.getTimestamp()); 9623 } 9624 if (other.hasDeleteType()) { 9625 setDeleteType(other.getDeleteType()); 9626 } 9627 if (other.hasTags()) { 9628 setTags(other.getTags()); 9629 } 9630 this.mergeUnknownFields(other.getUnknownFields()); 9631 return this; 9632 } 9633 isInitialized()9634 public final boolean isInitialized() { 9635 return true; 9636 } 9637 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9638 public Builder mergeFrom( 9639 com.google.protobuf.CodedInputStream input, 9640 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9641 throws java.io.IOException { 9642 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue parsedMessage = null; 9643 try { 9644 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 9645 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9646 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue) e.getUnfinishedMessage(); 9647 throw e; 9648 
} finally { 9649 if (parsedMessage != null) { 9650 mergeFrom(parsedMessage); 9651 } 9652 } 9653 return this; 9654 } 9655 private int bitField0_; 9656 9657 // optional bytes qualifier = 1; 9658 private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; 9659 /** 9660 * <code>optional bytes qualifier = 1;</code> 9661 */ hasQualifier()9662 public boolean hasQualifier() { 9663 return ((bitField0_ & 0x00000001) == 0x00000001); 9664 } 9665 /** 9666 * <code>optional bytes qualifier = 1;</code> 9667 */ getQualifier()9668 public com.google.protobuf.ByteString getQualifier() { 9669 return qualifier_; 9670 } 9671 /** 9672 * <code>optional bytes qualifier = 1;</code> 9673 */ setQualifier(com.google.protobuf.ByteString value)9674 public Builder setQualifier(com.google.protobuf.ByteString value) { 9675 if (value == null) { 9676 throw new NullPointerException(); 9677 } 9678 bitField0_ |= 0x00000001; 9679 qualifier_ = value; 9680 onChanged(); 9681 return this; 9682 } 9683 /** 9684 * <code>optional bytes qualifier = 1;</code> 9685 */ clearQualifier()9686 public Builder clearQualifier() { 9687 bitField0_ = (bitField0_ & ~0x00000001); 9688 qualifier_ = getDefaultInstance().getQualifier(); 9689 onChanged(); 9690 return this; 9691 } 9692 9693 // optional bytes value = 2; 9694 private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; 9695 /** 9696 * <code>optional bytes value = 2;</code> 9697 */ hasValue()9698 public boolean hasValue() { 9699 return ((bitField0_ & 0x00000002) == 0x00000002); 9700 } 9701 /** 9702 * <code>optional bytes value = 2;</code> 9703 */ getValue()9704 public com.google.protobuf.ByteString getValue() { 9705 return value_; 9706 } 9707 /** 9708 * <code>optional bytes value = 2;</code> 9709 */ setValue(com.google.protobuf.ByteString value)9710 public Builder setValue(com.google.protobuf.ByteString value) { 9711 if (value == null) { 9712 throw new NullPointerException(); 9713 } 9714 bitField0_ |= 0x00000002; 
9715 value_ = value; 9716 onChanged(); 9717 return this; 9718 } 9719 /** 9720 * <code>optional bytes value = 2;</code> 9721 */ clearValue()9722 public Builder clearValue() { 9723 bitField0_ = (bitField0_ & ~0x00000002); 9724 value_ = getDefaultInstance().getValue(); 9725 onChanged(); 9726 return this; 9727 } 9728 9729 // optional uint64 timestamp = 3; 9730 private long timestamp_ ; 9731 /** 9732 * <code>optional uint64 timestamp = 3;</code> 9733 */ hasTimestamp()9734 public boolean hasTimestamp() { 9735 return ((bitField0_ & 0x00000004) == 0x00000004); 9736 } 9737 /** 9738 * <code>optional uint64 timestamp = 3;</code> 9739 */ getTimestamp()9740 public long getTimestamp() { 9741 return timestamp_; 9742 } 9743 /** 9744 * <code>optional uint64 timestamp = 3;</code> 9745 */ setTimestamp(long value)9746 public Builder setTimestamp(long value) { 9747 bitField0_ |= 0x00000004; 9748 timestamp_ = value; 9749 onChanged(); 9750 return this; 9751 } 9752 /** 9753 * <code>optional uint64 timestamp = 3;</code> 9754 */ clearTimestamp()9755 public Builder clearTimestamp() { 9756 bitField0_ = (bitField0_ & ~0x00000004); 9757 timestamp_ = 0L; 9758 onChanged(); 9759 return this; 9760 } 9761 9762 // optional .MutationProto.DeleteType delete_type = 4; 9763 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; 9764 /** 9765 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9766 */ hasDeleteType()9767 public boolean hasDeleteType() { 9768 return ((bitField0_ & 0x00000008) == 0x00000008); 9769 } 9770 /** 9771 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9772 */ getDeleteType()9773 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType getDeleteType() { 9774 return deleteType_; 9775 } 9776 /** 9777 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9778 */ 
setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value)9779 public Builder setDeleteType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType value) { 9780 if (value == null) { 9781 throw new NullPointerException(); 9782 } 9783 bitField0_ |= 0x00000008; 9784 deleteType_ = value; 9785 onChanged(); 9786 return this; 9787 } 9788 /** 9789 * <code>optional .MutationProto.DeleteType delete_type = 4;</code> 9790 */ clearDeleteType()9791 public Builder clearDeleteType() { 9792 bitField0_ = (bitField0_ & ~0x00000008); 9793 deleteType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.DeleteType.DELETE_ONE_VERSION; 9794 onChanged(); 9795 return this; 9796 } 9797 9798 // optional bytes tags = 5; 9799 private com.google.protobuf.ByteString tags_ = com.google.protobuf.ByteString.EMPTY; 9800 /** 9801 * <code>optional bytes tags = 5;</code> 9802 */ hasTags()9803 public boolean hasTags() { 9804 return ((bitField0_ & 0x00000010) == 0x00000010); 9805 } 9806 /** 9807 * <code>optional bytes tags = 5;</code> 9808 */ getTags()9809 public com.google.protobuf.ByteString getTags() { 9810 return tags_; 9811 } 9812 /** 9813 * <code>optional bytes tags = 5;</code> 9814 */ setTags(com.google.protobuf.ByteString value)9815 public Builder setTags(com.google.protobuf.ByteString value) { 9816 if (value == null) { 9817 throw new NullPointerException(); 9818 } 9819 bitField0_ |= 0x00000010; 9820 tags_ = value; 9821 onChanged(); 9822 return this; 9823 } 9824 /** 9825 * <code>optional bytes tags = 5;</code> 9826 */ clearTags()9827 public Builder clearTags() { 9828 bitField0_ = (bitField0_ & ~0x00000010); 9829 tags_ = getDefaultInstance().getTags(); 9830 onChanged(); 9831 return this; 9832 } 9833 9834 // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue.QualifierValue) 9835 } 9836 9837 static { 9838 defaultInstance = new QualifierValue(true); defaultInstance.initFields()9839 
defaultInstance.initFields(); 9840 } 9841 9842 // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue.QualifierValue) 9843 } 9844 9845 private int bitField0_; 9846 // required bytes family = 1; 9847 public static final int FAMILY_FIELD_NUMBER = 1; 9848 private com.google.protobuf.ByteString family_; 9849 /** 9850 * <code>required bytes family = 1;</code> 9851 */ hasFamily()9852 public boolean hasFamily() { 9853 return ((bitField0_ & 0x00000001) == 0x00000001); 9854 } 9855 /** 9856 * <code>required bytes family = 1;</code> 9857 */ getFamily()9858 public com.google.protobuf.ByteString getFamily() { 9859 return family_; 9860 } 9861 9862 // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2; 9863 public static final int QUALIFIER_VALUE_FIELD_NUMBER = 2; 9864 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_; 9865 /** 9866 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 9867 */ getQualifierValueList()9868 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() { 9869 return qualifierValue_; 9870 } 9871 /** 9872 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 9873 */ 9874 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueOrBuilderList()9875 getQualifierValueOrBuilderList() { 9876 return qualifierValue_; 9877 } 9878 /** 9879 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 9880 */ getQualifierValueCount()9881 public int getQualifierValueCount() { 9882 return qualifierValue_.size(); 9883 } 9884 /** 9885 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 9886 */ getQualifierValue(int index)9887 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) { 9888 return qualifierValue_.get(index); 9889 } 9890 /** 9891 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 9892 */ getQualifierValueOrBuilder( int index)9893 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( 9894 int index) { 9895 return qualifierValue_.get(index); 9896 } 9897 initFields()9898 private void initFields() { 9899 family_ = com.google.protobuf.ByteString.EMPTY; 9900 qualifierValue_ = java.util.Collections.emptyList(); 9901 } 9902 private byte memoizedIsInitialized = -1; isInitialized()9903 public final boolean isInitialized() { 9904 byte isInitialized = memoizedIsInitialized; 9905 if (isInitialized != -1) return isInitialized == 1; 9906 9907 if (!hasFamily()) { 9908 memoizedIsInitialized = 0; 9909 return false; 9910 } 9911 memoizedIsInitialized = 1; 9912 return true; 9913 } 9914 writeTo(com.google.protobuf.CodedOutputStream output)9915 public void writeTo(com.google.protobuf.CodedOutputStream output) 9916 throws java.io.IOException { 9917 getSerializedSize(); 9918 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9919 output.writeBytes(1, family_); 9920 } 9921 for (int i = 0; i < qualifierValue_.size(); i++) { 9922 
output.writeMessage(2, qualifierValue_.get(i)); 9923 } 9924 getUnknownFields().writeTo(output); 9925 } 9926 9927 private int memoizedSerializedSize = -1; getSerializedSize()9928 public int getSerializedSize() { 9929 int size = memoizedSerializedSize; 9930 if (size != -1) return size; 9931 9932 size = 0; 9933 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9934 size += com.google.protobuf.CodedOutputStream 9935 .computeBytesSize(1, family_); 9936 } 9937 for (int i = 0; i < qualifierValue_.size(); i++) { 9938 size += com.google.protobuf.CodedOutputStream 9939 .computeMessageSize(2, qualifierValue_.get(i)); 9940 } 9941 size += getUnknownFields().getSerializedSize(); 9942 memoizedSerializedSize = size; 9943 return size; 9944 } 9945 9946 private static final long serialVersionUID = 0L; 9947 @java.lang.Override writeReplace()9948 protected java.lang.Object writeReplace() 9949 throws java.io.ObjectStreamException { 9950 return super.writeReplace(); 9951 } 9952 9953 @java.lang.Override equals(final java.lang.Object obj)9954 public boolean equals(final java.lang.Object obj) { 9955 if (obj == this) { 9956 return true; 9957 } 9958 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)) { 9959 return super.equals(obj); 9960 } 9961 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) obj; 9962 9963 boolean result = true; 9964 result = result && (hasFamily() == other.hasFamily()); 9965 if (hasFamily()) { 9966 result = result && getFamily() 9967 .equals(other.getFamily()); 9968 } 9969 result = result && getQualifierValueList() 9970 .equals(other.getQualifierValueList()); 9971 result = result && 9972 getUnknownFields().equals(other.getUnknownFields()); 9973 return result; 9974 } 9975 9976 private int memoizedHashCode = 0; 9977 @java.lang.Override hashCode()9978 public int hashCode() { 9979 if (memoizedHashCode != 
0) { 9980 return memoizedHashCode; 9981 } 9982 int hash = 41; 9983 hash = (19 * hash) + getDescriptorForType().hashCode(); 9984 if (hasFamily()) { 9985 hash = (37 * hash) + FAMILY_FIELD_NUMBER; 9986 hash = (53 * hash) + getFamily().hashCode(); 9987 } 9988 if (getQualifierValueCount() > 0) { 9989 hash = (37 * hash) + QUALIFIER_VALUE_FIELD_NUMBER; 9990 hash = (53 * hash) + getQualifierValueList().hashCode(); 9991 } 9992 hash = (29 * hash) + getUnknownFields().hashCode(); 9993 memoizedHashCode = hash; 9994 return hash; 9995 } 9996 parseFrom( com.google.protobuf.ByteString data)9997 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( 9998 com.google.protobuf.ByteString data) 9999 throws com.google.protobuf.InvalidProtocolBufferException { 10000 return PARSER.parseFrom(data); 10001 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10002 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( 10003 com.google.protobuf.ByteString data, 10004 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10005 throws com.google.protobuf.InvalidProtocolBufferException { 10006 return PARSER.parseFrom(data, extensionRegistry); 10007 } parseFrom(byte[] data)10008 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(byte[] data) 10009 throws com.google.protobuf.InvalidProtocolBufferException { 10010 return PARSER.parseFrom(data); 10011 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10012 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( 10013 byte[] data, 10014 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10015 throws com.google.protobuf.InvalidProtocolBufferException { 10016 return PARSER.parseFrom(data, extensionRegistry); 10017 } 
parseFrom(java.io.InputStream input)10018 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom(java.io.InputStream input) 10019 throws java.io.IOException { 10020 return PARSER.parseFrom(input); 10021 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10022 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( 10023 java.io.InputStream input, 10024 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10025 throws java.io.IOException { 10026 return PARSER.parseFrom(input, extensionRegistry); 10027 } parseDelimitedFrom(java.io.InputStream input)10028 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom(java.io.InputStream input) 10029 throws java.io.IOException { 10030 return PARSER.parseDelimitedFrom(input); 10031 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10032 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseDelimitedFrom( 10033 java.io.InputStream input, 10034 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10035 throws java.io.IOException { 10036 return PARSER.parseDelimitedFrom(input, extensionRegistry); 10037 } parseFrom( com.google.protobuf.CodedInputStream input)10038 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( 10039 com.google.protobuf.CodedInputStream input) 10040 throws java.io.IOException { 10041 return PARSER.parseFrom(input); 10042 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10043 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parseFrom( 10044 com.google.protobuf.CodedInputStream input, 10045 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10046 throws java.io.IOException { 10047 return PARSER.parseFrom(input, extensionRegistry); 10048 } 10049 newBuilder()10050 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()10051 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype)10052 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue prototype) { 10053 return newBuilder().mergeFrom(prototype); 10054 } toBuilder()10055 public Builder toBuilder() { return newBuilder(this); } 10056 10057 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)10058 protected Builder newBuilderForType( 10059 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10060 Builder builder = new Builder(parent); 10061 return builder; 10062 } 10063 /** 10064 * Protobuf type {@code MutationProto.ColumnValue} 10065 */ 10066 public static final class Builder extends 10067 com.google.protobuf.GeneratedMessage.Builder<Builder> 10068 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder { 10069 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10070 getDescriptor() { 10071 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor; 10072 } 10073 10074 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10075 internalGetFieldAccessorTable() { 10076 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_fieldAccessorTable 10077 .ensureFieldAccessorsInitialized( 10078 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.class, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder.class); 10079 } 10080 10081 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.newBuilder() Builder()10082 private Builder() { 10083 maybeForceBuilderInitialization(); 10084 } 10085 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)10086 private Builder( 10087 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10088 super(parent); 10089 maybeForceBuilderInitialization(); 10090 } maybeForceBuilderInitialization()10091 private void maybeForceBuilderInitialization() { 10092 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 10093 getQualifierValueFieldBuilder(); 10094 } 10095 } create()10096 private static Builder create() { 10097 return new Builder(); 10098 } 10099 clear()10100 public Builder clear() { 10101 super.clear(); 10102 family_ = com.google.protobuf.ByteString.EMPTY; 10103 bitField0_ = (bitField0_ & ~0x00000001); 10104 if (qualifierValueBuilder_ == null) { 10105 qualifierValue_ = java.util.Collections.emptyList(); 10106 bitField0_ = (bitField0_ & ~0x00000002); 10107 } else { 10108 qualifierValueBuilder_.clear(); 10109 } 10110 return this; 10111 } 10112 clone()10113 public Builder clone() { 10114 return create().mergeFrom(buildPartial()); 10115 } 10116 10117 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()10118 getDescriptorForType() { 10119 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_ColumnValue_descriptor; 10120 } 10121 getDefaultInstanceForType()10122 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getDefaultInstanceForType() { 10123 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance(); 10124 } 10125 build()10126 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue build() 
{ 10127 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = buildPartial(); 10128 if (!result.isInitialized()) { 10129 throw newUninitializedMessageException(result); 10130 } 10131 return result; 10132 } 10133 buildPartial()10134 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue buildPartial() { 10135 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue(this); 10136 int from_bitField0_ = bitField0_; 10137 int to_bitField0_ = 0; 10138 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 10139 to_bitField0_ |= 0x00000001; 10140 } 10141 result.family_ = family_; 10142 if (qualifierValueBuilder_ == null) { 10143 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10144 qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); 10145 bitField0_ = (bitField0_ & ~0x00000002); 10146 } 10147 result.qualifierValue_ = qualifierValue_; 10148 } else { 10149 result.qualifierValue_ = qualifierValueBuilder_.build(); 10150 } 10151 result.bitField0_ = to_bitField0_; 10152 onBuilt(); 10153 return result; 10154 } 10155 mergeFrom(com.google.protobuf.Message other)10156 public Builder mergeFrom(com.google.protobuf.Message other) { 10157 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) { 10158 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue)other); 10159 } else { 10160 super.mergeFrom(other); 10161 return this; 10162 } 10163 } 10164 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other)10165 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue other) { 10166 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()) 
return this; 10167 if (other.hasFamily()) { 10168 setFamily(other.getFamily()); 10169 } 10170 if (qualifierValueBuilder_ == null) { 10171 if (!other.qualifierValue_.isEmpty()) { 10172 if (qualifierValue_.isEmpty()) { 10173 qualifierValue_ = other.qualifierValue_; 10174 bitField0_ = (bitField0_ & ~0x00000002); 10175 } else { 10176 ensureQualifierValueIsMutable(); 10177 qualifierValue_.addAll(other.qualifierValue_); 10178 } 10179 onChanged(); 10180 } 10181 } else { 10182 if (!other.qualifierValue_.isEmpty()) { 10183 if (qualifierValueBuilder_.isEmpty()) { 10184 qualifierValueBuilder_.dispose(); 10185 qualifierValueBuilder_ = null; 10186 qualifierValue_ = other.qualifierValue_; 10187 bitField0_ = (bitField0_ & ~0x00000002); 10188 qualifierValueBuilder_ = 10189 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 10190 getQualifierValueFieldBuilder() : null; 10191 } else { 10192 qualifierValueBuilder_.addAllMessages(other.qualifierValue_); 10193 } 10194 } 10195 } 10196 this.mergeUnknownFields(other.getUnknownFields()); 10197 return this; 10198 } 10199 isInitialized()10200 public final boolean isInitialized() { 10201 if (!hasFamily()) { 10202 10203 return false; 10204 } 10205 return true; 10206 } 10207 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10208 public Builder mergeFrom( 10209 com.google.protobuf.CodedInputStream input, 10210 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10211 throws java.io.IOException { 10212 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue parsedMessage = null; 10213 try { 10214 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 10215 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10216 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue) e.getUnfinishedMessage(); 10217 throw e; 10218 } finally { 10219 if (parsedMessage != null) { 10220 
mergeFrom(parsedMessage); 10221 } 10222 } 10223 return this; 10224 } 10225 private int bitField0_; 10226 10227 // required bytes family = 1; 10228 private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; 10229 /** 10230 * <code>required bytes family = 1;</code> 10231 */ hasFamily()10232 public boolean hasFamily() { 10233 return ((bitField0_ & 0x00000001) == 0x00000001); 10234 } 10235 /** 10236 * <code>required bytes family = 1;</code> 10237 */ getFamily()10238 public com.google.protobuf.ByteString getFamily() { 10239 return family_; 10240 } 10241 /** 10242 * <code>required bytes family = 1;</code> 10243 */ setFamily(com.google.protobuf.ByteString value)10244 public Builder setFamily(com.google.protobuf.ByteString value) { 10245 if (value == null) { 10246 throw new NullPointerException(); 10247 } 10248 bitField0_ |= 0x00000001; 10249 family_ = value; 10250 onChanged(); 10251 return this; 10252 } 10253 /** 10254 * <code>required bytes family = 1;</code> 10255 */ clearFamily()10256 public Builder clearFamily() { 10257 bitField0_ = (bitField0_ & ~0x00000001); 10258 family_ = getDefaultInstance().getFamily(); 10259 onChanged(); 10260 return this; 10261 } 10262 10263 // repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2; 10264 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> qualifierValue_ = 10265 java.util.Collections.emptyList(); ensureQualifierValueIsMutable()10266 private void ensureQualifierValueIsMutable() { 10267 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 10268 qualifierValue_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue>(qualifierValue_); 10269 bitField0_ |= 0x00000002; 10270 } 10271 } 10272 10273 private com.google.protobuf.RepeatedFieldBuilder< 10274 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; 10275 10276 /** 10277 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10278 */ getQualifierValueList()10279 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> getQualifierValueList() { 10280 if (qualifierValueBuilder_ == null) { 10281 return java.util.Collections.unmodifiableList(qualifierValue_); 10282 } else { 10283 return qualifierValueBuilder_.getMessageList(); 10284 } 10285 } 10286 /** 10287 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10288 */ getQualifierValueCount()10289 public int getQualifierValueCount() { 10290 if (qualifierValueBuilder_ == null) { 10291 return qualifierValue_.size(); 10292 } else { 10293 return qualifierValueBuilder_.getCount(); 10294 } 10295 } 10296 /** 10297 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10298 */ getQualifierValue(int index)10299 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue getQualifierValue(int index) { 10300 if (qualifierValueBuilder_ == null) { 10301 return qualifierValue_.get(index); 10302 } else { 10303 return qualifierValueBuilder_.getMessage(index); 10304 } 10305 } 10306 /** 10307 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10308 */ setQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value)10309 public Builder setQualifierValue( 10310 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { 10311 if (qualifierValueBuilder_ == null) { 10312 if (value == null) { 
10313 throw new NullPointerException(); 10314 } 10315 ensureQualifierValueIsMutable(); 10316 qualifierValue_.set(index, value); 10317 onChanged(); 10318 } else { 10319 qualifierValueBuilder_.setMessage(index, value); 10320 } 10321 return this; 10322 } 10323 /** 10324 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10325 */ setQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue)10326 public Builder setQualifierValue( 10327 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { 10328 if (qualifierValueBuilder_ == null) { 10329 ensureQualifierValueIsMutable(); 10330 qualifierValue_.set(index, builderForValue.build()); 10331 onChanged(); 10332 } else { 10333 qualifierValueBuilder_.setMessage(index, builderForValue.build()); 10334 } 10335 return this; 10336 } 10337 /** 10338 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10339 */ addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value)10340 public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { 10341 if (qualifierValueBuilder_ == null) { 10342 if (value == null) { 10343 throw new NullPointerException(); 10344 } 10345 ensureQualifierValueIsMutable(); 10346 qualifierValue_.add(value); 10347 onChanged(); 10348 } else { 10349 qualifierValueBuilder_.addMessage(value); 10350 } 10351 return this; 10352 } 10353 /** 10354 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10355 */ addQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value)10356 public Builder addQualifierValue( 10357 int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue value) { 10358 if (qualifierValueBuilder_ == null) { 10359 if (value == null) { 10360 throw new NullPointerException(); 10361 } 10362 ensureQualifierValueIsMutable(); 10363 qualifierValue_.add(index, value); 10364 onChanged(); 10365 } else { 10366 qualifierValueBuilder_.addMessage(index, value); 10367 } 10368 return this; 10369 } 10370 /** 10371 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10372 */ addQualifierValue( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue)10373 public Builder addQualifierValue( 10374 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { 10375 if (qualifierValueBuilder_ == null) { 10376 ensureQualifierValueIsMutable(); 10377 qualifierValue_.add(builderForValue.build()); 10378 onChanged(); 10379 } else { 10380 qualifierValueBuilder_.addMessage(builderForValue.build()); 10381 } 10382 return this; 10383 } 10384 /** 10385 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10386 */ addQualifierValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue)10387 public Builder addQualifierValue( 10388 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder builderForValue) { 10389 if (qualifierValueBuilder_ == null) { 10390 ensureQualifierValueIsMutable(); 10391 qualifierValue_.add(index, builderForValue.build()); 10392 onChanged(); 10393 } else { 10394 qualifierValueBuilder_.addMessage(index, builderForValue.build()); 10395 } 10396 return this; 10397 } 10398 /** 10399 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10400 */ addAllQualifierValue( 
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values)10401 public Builder addAllQualifierValue( 10402 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue> values) { 10403 if (qualifierValueBuilder_ == null) { 10404 ensureQualifierValueIsMutable(); 10405 super.addAll(values, qualifierValue_); 10406 onChanged(); 10407 } else { 10408 qualifierValueBuilder_.addAllMessages(values); 10409 } 10410 return this; 10411 } 10412 /** 10413 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10414 */ clearQualifierValue()10415 public Builder clearQualifierValue() { 10416 if (qualifierValueBuilder_ == null) { 10417 qualifierValue_ = java.util.Collections.emptyList(); 10418 bitField0_ = (bitField0_ & ~0x00000002); 10419 onChanged(); 10420 } else { 10421 qualifierValueBuilder_.clear(); 10422 } 10423 return this; 10424 } 10425 /** 10426 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10427 */ removeQualifierValue(int index)10428 public Builder removeQualifierValue(int index) { 10429 if (qualifierValueBuilder_ == null) { 10430 ensureQualifierValueIsMutable(); 10431 qualifierValue_.remove(index); 10432 onChanged(); 10433 } else { 10434 qualifierValueBuilder_.remove(index); 10435 } 10436 return this; 10437 } 10438 /** 10439 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10440 */ getQualifierValueBuilder( int index)10441 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( 10442 int index) { 10443 return getQualifierValueFieldBuilder().getBuilder(index); 10444 } 10445 /** 10446 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10447 */ getQualifierValueOrBuilder( int index)10448 public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( 10449 int index) { 10450 if (qualifierValueBuilder_ == null) { 10451 return qualifierValue_.get(index); } else { 10452 return qualifierValueBuilder_.getMessageOrBuilder(index); 10453 } 10454 } 10455 /** 10456 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10457 */ 10458 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueOrBuilderList()10459 getQualifierValueOrBuilderList() { 10460 if (qualifierValueBuilder_ != null) { 10461 return qualifierValueBuilder_.getMessageOrBuilderList(); 10462 } else { 10463 return java.util.Collections.unmodifiableList(qualifierValue_); 10464 } 10465 } 10466 /** 10467 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10468 */ addQualifierValueBuilder()10469 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { 10470 return getQualifierValueFieldBuilder().addBuilder( 10471 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()); 10472 } 10473 /** 10474 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10475 */ addQualifierValueBuilder( int index)10476 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( 10477 int index) { 10478 return getQualifierValueFieldBuilder().addBuilder( 10479 index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.getDefaultInstance()); 10480 } 10481 /** 10482 * <code>repeated .MutationProto.ColumnValue.QualifierValue qualifier_value = 2;</code> 10483 */ 10484 public 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder> getQualifierValueBuilderList()10485 getQualifierValueBuilderList() { 10486 return getQualifierValueFieldBuilder().getBuilderList(); 10487 } 10488 private com.google.protobuf.RepeatedFieldBuilder< 10489 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder> getQualifierValueFieldBuilder()10490 getQualifierValueFieldBuilder() { 10491 if (qualifierValueBuilder_ == null) { 10492 qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 10493 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.QualifierValueOrBuilder>( 10494 qualifierValue_, 10495 ((bitField0_ & 0x00000002) == 0x00000002), 10496 getParentForChildren(), 10497 isClean()); 10498 qualifierValue_ = null; 10499 } 10500 return qualifierValueBuilder_; 10501 } 10502 10503 // @@protoc_insertion_point(builder_scope:MutationProto.ColumnValue) 10504 } 10505 10506 static { 10507 defaultInstance = new ColumnValue(true); defaultInstance.initFields()10508 defaultInstance.initFields(); 10509 } 10510 10511 // @@protoc_insertion_point(class_scope:MutationProto.ColumnValue) 10512 } 10513 10514 private int bitField0_; 10515 // optional bytes row = 1; 10516 public static final int ROW_FIELD_NUMBER = 1; 10517 private com.google.protobuf.ByteString row_; 10518 /** 10519 * <code>optional bytes row = 1;</code> 10520 */ hasRow()10521 public boolean hasRow() { 10522 return ((bitField0_ & 0x00000001) == 0x00000001); 
10523 } 10524 /** 10525 * <code>optional bytes row = 1;</code> 10526 */ getRow()10527 public com.google.protobuf.ByteString getRow() { 10528 return row_; 10529 } 10530 10531 // optional .MutationProto.MutationType mutate_type = 2; 10532 public static final int MUTATE_TYPE_FIELD_NUMBER = 2; 10533 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_; 10534 /** 10535 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 10536 */ hasMutateType()10537 public boolean hasMutateType() { 10538 return ((bitField0_ & 0x00000002) == 0x00000002); 10539 } 10540 /** 10541 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 10542 */ getMutateType()10543 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { 10544 return mutateType_; 10545 } 10546 10547 // repeated .MutationProto.ColumnValue column_value = 3; 10548 public static final int COLUMN_VALUE_FIELD_NUMBER = 3; 10549 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_; 10550 /** 10551 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 10552 */ getColumnValueList()10553 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() { 10554 return columnValue_; 10555 } 10556 /** 10557 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 10558 */ 10559 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueOrBuilderList()10560 getColumnValueOrBuilderList() { 10561 return columnValue_; 10562 } 10563 /** 10564 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 10565 */ getColumnValueCount()10566 public int getColumnValueCount() { 10567 return columnValue_.size(); 10568 } 10569 /** 10570 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 10571 */ getColumnValue(int index)10572 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) { 10573 return columnValue_.get(index); 10574 } 10575 /** 10576 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 10577 */ getColumnValueOrBuilder( int index)10578 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( 10579 int index) { 10580 return columnValue_.get(index); 10581 } 10582 10583 // optional uint64 timestamp = 4; 10584 public static final int TIMESTAMP_FIELD_NUMBER = 4; 10585 private long timestamp_; 10586 /** 10587 * <code>optional uint64 timestamp = 4;</code> 10588 */ hasTimestamp()10589 public boolean hasTimestamp() { 10590 return ((bitField0_ & 0x00000004) == 0x00000004); 10591 } 10592 /** 10593 * <code>optional uint64 timestamp = 4;</code> 10594 */ getTimestamp()10595 public long getTimestamp() { 10596 return timestamp_; 10597 } 10598 10599 // repeated .NameBytesPair attribute = 5; 10600 public static final int ATTRIBUTE_FIELD_NUMBER = 5; 10601 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_; 10602 /** 10603 * <code>repeated .NameBytesPair attribute = 5;</code> 10604 */ getAttributeList()10605 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { 10606 return attribute_; 10607 } 10608 /** 10609 * <code>repeated 
.NameBytesPair attribute = 5;</code> 10610 */ 10611 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()10612 getAttributeOrBuilderList() { 10613 return attribute_; 10614 } 10615 /** 10616 * <code>repeated .NameBytesPair attribute = 5;</code> 10617 */ getAttributeCount()10618 public int getAttributeCount() { 10619 return attribute_.size(); 10620 } 10621 /** 10622 * <code>repeated .NameBytesPair attribute = 5;</code> 10623 */ getAttribute(int index)10624 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { 10625 return attribute_.get(index); 10626 } 10627 /** 10628 * <code>repeated .NameBytesPair attribute = 5;</code> 10629 */ getAttributeOrBuilder( int index)10630 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 10631 int index) { 10632 return attribute_.get(index); 10633 } 10634 10635 // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT]; 10636 public static final int DURABILITY_FIELD_NUMBER = 6; 10637 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_; 10638 /** 10639 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 10640 */ hasDurability()10641 public boolean hasDurability() { 10642 return ((bitField0_ & 0x00000008) == 0x00000008); 10643 } 10644 /** 10645 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 10646 */ getDurability()10647 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() { 10648 return durability_; 10649 } 10650 10651 // optional .TimeRange time_range = 7; 10652 public static final int TIME_RANGE_FIELD_NUMBER = 7; 10653 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; 10654 /** 10655 * <code>optional .TimeRange 
time_range = 7;</code> 10656 * 10657 * <pre> 10658 * For some mutations, a result may be returned, in which case, 10659 * time range can be specified for potential performance gain 10660 * </pre> 10661 */ hasTimeRange()10662 public boolean hasTimeRange() { 10663 return ((bitField0_ & 0x00000010) == 0x00000010); 10664 } 10665 /** 10666 * <code>optional .TimeRange time_range = 7;</code> 10667 * 10668 * <pre> 10669 * For some mutations, a result may be returned, in which case, 10670 * time range can be specified for potential performance gain 10671 * </pre> 10672 */ getTimeRange()10673 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 10674 return timeRange_; 10675 } 10676 /** 10677 * <code>optional .TimeRange time_range = 7;</code> 10678 * 10679 * <pre> 10680 * For some mutations, a result may be returned, in which case, 10681 * time range can be specified for potential performance gain 10682 * </pre> 10683 */ getTimeRangeOrBuilder()10684 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 10685 return timeRange_; 10686 } 10687 10688 // optional int32 associated_cell_count = 8; 10689 public static final int ASSOCIATED_CELL_COUNT_FIELD_NUMBER = 8; 10690 private int associatedCellCount_; 10691 /** 10692 * <code>optional int32 associated_cell_count = 8;</code> 10693 * 10694 * <pre> 10695 * The below count is set when the associated cells are NOT 10696 * part of this protobuf message; they are passed alongside 10697 * and then this Message is a placeholder with metadata. The 10698 * count is needed to know how many to peel off the block of Cells as 10699 * ours. NOTE: This is different from the pb managed cell_count of the 10700 * 'cell' field above which is non-null when the cells are pb'd. 
10701 * </pre> 10702 */ hasAssociatedCellCount()10703 public boolean hasAssociatedCellCount() { 10704 return ((bitField0_ & 0x00000020) == 0x00000020); 10705 } 10706 /** 10707 * <code>optional int32 associated_cell_count = 8;</code> 10708 * 10709 * <pre> 10710 * The below count is set when the associated cells are NOT 10711 * part of this protobuf message; they are passed alongside 10712 * and then this Message is a placeholder with metadata. The 10713 * count is needed to know how many to peel off the block of Cells as 10714 * ours. NOTE: This is different from the pb managed cell_count of the 10715 * 'cell' field above which is non-null when the cells are pb'd. 10716 * </pre> 10717 */ getAssociatedCellCount()10718 public int getAssociatedCellCount() { 10719 return associatedCellCount_; 10720 } 10721 10722 // optional uint64 nonce = 9; 10723 public static final int NONCE_FIELD_NUMBER = 9; 10724 private long nonce_; 10725 /** 10726 * <code>optional uint64 nonce = 9;</code> 10727 */ hasNonce()10728 public boolean hasNonce() { 10729 return ((bitField0_ & 0x00000040) == 0x00000040); 10730 } 10731 /** 10732 * <code>optional uint64 nonce = 9;</code> 10733 */ getNonce()10734 public long getNonce() { 10735 return nonce_; 10736 } 10737 initFields()10738 private void initFields() { 10739 row_ = com.google.protobuf.ByteString.EMPTY; 10740 mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; 10741 columnValue_ = java.util.Collections.emptyList(); 10742 timestamp_ = 0L; 10743 attribute_ = java.util.Collections.emptyList(); 10744 durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; 10745 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 10746 associatedCellCount_ = 0; 10747 nonce_ = 0L; 10748 } 10749 private byte memoizedIsInitialized = -1; isInitialized()10750 public final boolean isInitialized() { 10751 byte isInitialized 
= memoizedIsInitialized; 10752 if (isInitialized != -1) return isInitialized == 1; 10753 10754 for (int i = 0; i < getColumnValueCount(); i++) { 10755 if (!getColumnValue(i).isInitialized()) { 10756 memoizedIsInitialized = 0; 10757 return false; 10758 } 10759 } 10760 for (int i = 0; i < getAttributeCount(); i++) { 10761 if (!getAttribute(i).isInitialized()) { 10762 memoizedIsInitialized = 0; 10763 return false; 10764 } 10765 } 10766 memoizedIsInitialized = 1; 10767 return true; 10768 } 10769 writeTo(com.google.protobuf.CodedOutputStream output)10770 public void writeTo(com.google.protobuf.CodedOutputStream output) 10771 throws java.io.IOException { 10772 getSerializedSize(); 10773 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10774 output.writeBytes(1, row_); 10775 } 10776 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10777 output.writeEnum(2, mutateType_.getNumber()); 10778 } 10779 for (int i = 0; i < columnValue_.size(); i++) { 10780 output.writeMessage(3, columnValue_.get(i)); 10781 } 10782 if (((bitField0_ & 0x00000004) == 0x00000004)) { 10783 output.writeUInt64(4, timestamp_); 10784 } 10785 for (int i = 0; i < attribute_.size(); i++) { 10786 output.writeMessage(5, attribute_.get(i)); 10787 } 10788 if (((bitField0_ & 0x00000008) == 0x00000008)) { 10789 output.writeEnum(6, durability_.getNumber()); 10790 } 10791 if (((bitField0_ & 0x00000010) == 0x00000010)) { 10792 output.writeMessage(7, timeRange_); 10793 } 10794 if (((bitField0_ & 0x00000020) == 0x00000020)) { 10795 output.writeInt32(8, associatedCellCount_); 10796 } 10797 if (((bitField0_ & 0x00000040) == 0x00000040)) { 10798 output.writeUInt64(9, nonce_); 10799 } 10800 getUnknownFields().writeTo(output); 10801 } 10802 10803 private int memoizedSerializedSize = -1; getSerializedSize()10804 public int getSerializedSize() { 10805 int size = memoizedSerializedSize; 10806 if (size != -1) return size; 10807 10808 size = 0; 10809 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10810 size += 
com.google.protobuf.CodedOutputStream 10811 .computeBytesSize(1, row_); 10812 } 10813 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10814 size += com.google.protobuf.CodedOutputStream 10815 .computeEnumSize(2, mutateType_.getNumber()); 10816 } 10817 for (int i = 0; i < columnValue_.size(); i++) { 10818 size += com.google.protobuf.CodedOutputStream 10819 .computeMessageSize(3, columnValue_.get(i)); 10820 } 10821 if (((bitField0_ & 0x00000004) == 0x00000004)) { 10822 size += com.google.protobuf.CodedOutputStream 10823 .computeUInt64Size(4, timestamp_); 10824 } 10825 for (int i = 0; i < attribute_.size(); i++) { 10826 size += com.google.protobuf.CodedOutputStream 10827 .computeMessageSize(5, attribute_.get(i)); 10828 } 10829 if (((bitField0_ & 0x00000008) == 0x00000008)) { 10830 size += com.google.protobuf.CodedOutputStream 10831 .computeEnumSize(6, durability_.getNumber()); 10832 } 10833 if (((bitField0_ & 0x00000010) == 0x00000010)) { 10834 size += com.google.protobuf.CodedOutputStream 10835 .computeMessageSize(7, timeRange_); 10836 } 10837 if (((bitField0_ & 0x00000020) == 0x00000020)) { 10838 size += com.google.protobuf.CodedOutputStream 10839 .computeInt32Size(8, associatedCellCount_); 10840 } 10841 if (((bitField0_ & 0x00000040) == 0x00000040)) { 10842 size += com.google.protobuf.CodedOutputStream 10843 .computeUInt64Size(9, nonce_); 10844 } 10845 size += getUnknownFields().getSerializedSize(); 10846 memoizedSerializedSize = size; 10847 return size; 10848 } 10849 10850 private static final long serialVersionUID = 0L; 10851 @java.lang.Override writeReplace()10852 protected java.lang.Object writeReplace() 10853 throws java.io.ObjectStreamException { 10854 return super.writeReplace(); 10855 } 10856 10857 @java.lang.Override equals(final java.lang.Object obj)10858 public boolean equals(final java.lang.Object obj) { 10859 if (obj == this) { 10860 return true; 10861 } 10862 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)) 
{ 10863 return super.equals(obj); 10864 } 10865 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) obj; 10866 10867 boolean result = true; 10868 result = result && (hasRow() == other.hasRow()); 10869 if (hasRow()) { 10870 result = result && getRow() 10871 .equals(other.getRow()); 10872 } 10873 result = result && (hasMutateType() == other.hasMutateType()); 10874 if (hasMutateType()) { 10875 result = result && 10876 (getMutateType() == other.getMutateType()); 10877 } 10878 result = result && getColumnValueList() 10879 .equals(other.getColumnValueList()); 10880 result = result && (hasTimestamp() == other.hasTimestamp()); 10881 if (hasTimestamp()) { 10882 result = result && (getTimestamp() 10883 == other.getTimestamp()); 10884 } 10885 result = result && getAttributeList() 10886 .equals(other.getAttributeList()); 10887 result = result && (hasDurability() == other.hasDurability()); 10888 if (hasDurability()) { 10889 result = result && 10890 (getDurability() == other.getDurability()); 10891 } 10892 result = result && (hasTimeRange() == other.hasTimeRange()); 10893 if (hasTimeRange()) { 10894 result = result && getTimeRange() 10895 .equals(other.getTimeRange()); 10896 } 10897 result = result && (hasAssociatedCellCount() == other.hasAssociatedCellCount()); 10898 if (hasAssociatedCellCount()) { 10899 result = result && (getAssociatedCellCount() 10900 == other.getAssociatedCellCount()); 10901 } 10902 result = result && (hasNonce() == other.hasNonce()); 10903 if (hasNonce()) { 10904 result = result && (getNonce() 10905 == other.getNonce()); 10906 } 10907 result = result && 10908 getUnknownFields().equals(other.getUnknownFields()); 10909 return result; 10910 } 10911 10912 private int memoizedHashCode = 0; 10913 @java.lang.Override hashCode()10914 public int hashCode() { 10915 if (memoizedHashCode != 0) { 10916 return memoizedHashCode; 10917 } 10918 int hash = 41; 10919 hash = (19 
* hash) + getDescriptorForType().hashCode(); 10920 if (hasRow()) { 10921 hash = (37 * hash) + ROW_FIELD_NUMBER; 10922 hash = (53 * hash) + getRow().hashCode(); 10923 } 10924 if (hasMutateType()) { 10925 hash = (37 * hash) + MUTATE_TYPE_FIELD_NUMBER; 10926 hash = (53 * hash) + hashEnum(getMutateType()); 10927 } 10928 if (getColumnValueCount() > 0) { 10929 hash = (37 * hash) + COLUMN_VALUE_FIELD_NUMBER; 10930 hash = (53 * hash) + getColumnValueList().hashCode(); 10931 } 10932 if (hasTimestamp()) { 10933 hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; 10934 hash = (53 * hash) + hashLong(getTimestamp()); 10935 } 10936 if (getAttributeCount() > 0) { 10937 hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; 10938 hash = (53 * hash) + getAttributeList().hashCode(); 10939 } 10940 if (hasDurability()) { 10941 hash = (37 * hash) + DURABILITY_FIELD_NUMBER; 10942 hash = (53 * hash) + hashEnum(getDurability()); 10943 } 10944 if (hasTimeRange()) { 10945 hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; 10946 hash = (53 * hash) + getTimeRange().hashCode(); 10947 } 10948 if (hasAssociatedCellCount()) { 10949 hash = (37 * hash) + ASSOCIATED_CELL_COUNT_FIELD_NUMBER; 10950 hash = (53 * hash) + getAssociatedCellCount(); 10951 } 10952 if (hasNonce()) { 10953 hash = (37 * hash) + NONCE_FIELD_NUMBER; 10954 hash = (53 * hash) + hashLong(getNonce()); 10955 } 10956 hash = (29 * hash) + getUnknownFields().hashCode(); 10957 memoizedHashCode = hash; 10958 return hash; 10959 } 10960 parseFrom( com.google.protobuf.ByteString data)10961 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( 10962 com.google.protobuf.ByteString data) 10963 throws com.google.protobuf.InvalidProtocolBufferException { 10964 return PARSER.parseFrom(data); 10965 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10966 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( 10967 
com.google.protobuf.ByteString data, 10968 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10969 throws com.google.protobuf.InvalidProtocolBufferException { 10970 return PARSER.parseFrom(data, extensionRegistry); 10971 } parseFrom(byte[] data)10972 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(byte[] data) 10973 throws com.google.protobuf.InvalidProtocolBufferException { 10974 return PARSER.parseFrom(data); 10975 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10976 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( 10977 byte[] data, 10978 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10979 throws com.google.protobuf.InvalidProtocolBufferException { 10980 return PARSER.parseFrom(data, extensionRegistry); 10981 } parseFrom(java.io.InputStream input)10982 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom(java.io.InputStream input) 10983 throws java.io.IOException { 10984 return PARSER.parseFrom(input); 10985 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10986 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( 10987 java.io.InputStream input, 10988 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10989 throws java.io.IOException { 10990 return PARSER.parseFrom(input, extensionRegistry); 10991 } parseDelimitedFrom(java.io.InputStream input)10992 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseDelimitedFrom(java.io.InputStream input) 10993 throws java.io.IOException { 10994 return PARSER.parseDelimitedFrom(input); 10995 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10996 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto 
parseDelimitedFrom( 10997 java.io.InputStream input, 10998 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10999 throws java.io.IOException { 11000 return PARSER.parseDelimitedFrom(input, extensionRegistry); 11001 } parseFrom( com.google.protobuf.CodedInputStream input)11002 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( 11003 com.google.protobuf.CodedInputStream input) 11004 throws java.io.IOException { 11005 return PARSER.parseFrom(input); 11006 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11007 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parseFrom( 11008 com.google.protobuf.CodedInputStream input, 11009 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11010 throws java.io.IOException { 11011 return PARSER.parseFrom(input, extensionRegistry); 11012 } 11013 newBuilder()11014 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()11015 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype)11016 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto prototype) { 11017 return newBuilder().mergeFrom(prototype); 11018 } toBuilder()11019 public Builder toBuilder() { return newBuilder(this); } 11020 11021 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)11022 protected Builder newBuilderForType( 11023 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 11024 Builder builder = new Builder(parent); 11025 return builder; 11026 } 11027 /** 11028 * Protobuf type {@code MutationProto} 11029 * 11030 * <pre> 11031 ** 11032 * A specific mutation inside a mutate request. 11033 * It can be an append, increment, put or delete based 11034 * on the mutation type. 
It can be fully filled in or 11035 * only metadata present because data is being carried 11036 * elsewhere outside of pb. 11037 * </pre> 11038 */ 11039 public static final class Builder extends 11040 com.google.protobuf.GeneratedMessage.Builder<Builder> 11041 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder { 11042 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()11043 getDescriptor() { 11044 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor; 11045 } 11046 11047 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()11048 internalGetFieldAccessorTable() { 11049 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_fieldAccessorTable 11050 .ensureFieldAccessorsInitialized( 11051 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder.class); 11052 } 11053 11054 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder() Builder()11055 private Builder() { 11056 maybeForceBuilderInitialization(); 11057 } 11058 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)11059 private Builder( 11060 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 11061 super(parent); 11062 maybeForceBuilderInitialization(); 11063 } maybeForceBuilderInitialization()11064 private void maybeForceBuilderInitialization() { 11065 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 11066 getColumnValueFieldBuilder(); 11067 getAttributeFieldBuilder(); 11068 getTimeRangeFieldBuilder(); 11069 } 11070 } create()11071 private static Builder create() { 11072 return new Builder(); 11073 } 11074 clear()11075 public Builder clear() { 11076 super.clear(); 11077 row_ = com.google.protobuf.ByteString.EMPTY; 11078 bitField0_ = 
(bitField0_ & ~0x00000001); 11079 mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; 11080 bitField0_ = (bitField0_ & ~0x00000002); 11081 if (columnValueBuilder_ == null) { 11082 columnValue_ = java.util.Collections.emptyList(); 11083 bitField0_ = (bitField0_ & ~0x00000004); 11084 } else { 11085 columnValueBuilder_.clear(); 11086 } 11087 timestamp_ = 0L; 11088 bitField0_ = (bitField0_ & ~0x00000008); 11089 if (attributeBuilder_ == null) { 11090 attribute_ = java.util.Collections.emptyList(); 11091 bitField0_ = (bitField0_ & ~0x00000010); 11092 } else { 11093 attributeBuilder_.clear(); 11094 } 11095 durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; 11096 bitField0_ = (bitField0_ & ~0x00000020); 11097 if (timeRangeBuilder_ == null) { 11098 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 11099 } else { 11100 timeRangeBuilder_.clear(); 11101 } 11102 bitField0_ = (bitField0_ & ~0x00000040); 11103 associatedCellCount_ = 0; 11104 bitField0_ = (bitField0_ & ~0x00000080); 11105 nonce_ = 0L; 11106 bitField0_ = (bitField0_ & ~0x00000100); 11107 return this; 11108 } 11109 clone()11110 public Builder clone() { 11111 return create().mergeFrom(buildPartial()); 11112 } 11113 11114 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()11115 getDescriptorForType() { 11116 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutationProto_descriptor; 11117 } 11118 getDefaultInstanceForType()11119 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getDefaultInstanceForType() { 11120 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); 11121 } 11122 build()11123 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto build() { 11124 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = buildPartial(); 11125 if (!result.isInitialized()) { 11126 throw newUninitializedMessageException(result); 11127 } 11128 return result; 11129 } 11130 buildPartial()11131 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto buildPartial() { 11132 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto(this); 11133 int from_bitField0_ = bitField0_; 11134 int to_bitField0_ = 0; 11135 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 11136 to_bitField0_ |= 0x00000001; 11137 } 11138 result.row_ = row_; 11139 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 11140 to_bitField0_ |= 0x00000002; 11141 } 11142 result.mutateType_ = mutateType_; 11143 if (columnValueBuilder_ == null) { 11144 if (((bitField0_ & 0x00000004) == 0x00000004)) { 11145 columnValue_ = java.util.Collections.unmodifiableList(columnValue_); 11146 bitField0_ = (bitField0_ & ~0x00000004); 11147 } 11148 result.columnValue_ = columnValue_; 11149 } else { 11150 result.columnValue_ = columnValueBuilder_.build(); 11151 } 11152 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 11153 to_bitField0_ |= 0x00000004; 11154 } 11155 result.timestamp_ = timestamp_; 11156 if (attributeBuilder_ == null) { 11157 if (((bitField0_ & 0x00000010) == 0x00000010)) { 11158 attribute_ = java.util.Collections.unmodifiableList(attribute_); 11159 bitField0_ = (bitField0_ & ~0x00000010); 11160 } 11161 result.attribute_ = attribute_; 11162 } else { 11163 result.attribute_ = attributeBuilder_.build(); 11164 } 11165 if (((from_bitField0_ & 0x00000020) == 0x00000020)) { 11166 to_bitField0_ |= 0x00000008; 11167 } 11168 result.durability_ = durability_; 11169 if (((from_bitField0_ & 0x00000040) == 0x00000040)) { 11170 to_bitField0_ |= 0x00000010; 11171 } 11172 if (timeRangeBuilder_ == null) { 11173 result.timeRange_ = timeRange_; 
11174 } else { 11175 result.timeRange_ = timeRangeBuilder_.build(); 11176 } 11177 if (((from_bitField0_ & 0x00000080) == 0x00000080)) { 11178 to_bitField0_ |= 0x00000020; 11179 } 11180 result.associatedCellCount_ = associatedCellCount_; 11181 if (((from_bitField0_ & 0x00000100) == 0x00000100)) { 11182 to_bitField0_ |= 0x00000040; 11183 } 11184 result.nonce_ = nonce_; 11185 result.bitField0_ = to_bitField0_; 11186 onBuilt(); 11187 return result; 11188 } 11189 mergeFrom(com.google.protobuf.Message other)11190 public Builder mergeFrom(com.google.protobuf.Message other) { 11191 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) { 11192 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto)other); 11193 } else { 11194 super.mergeFrom(other); 11195 return this; 11196 } 11197 } 11198 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other)11199 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto other) { 11200 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) return this; 11201 if (other.hasRow()) { 11202 setRow(other.getRow()); 11203 } 11204 if (other.hasMutateType()) { 11205 setMutateType(other.getMutateType()); 11206 } 11207 if (columnValueBuilder_ == null) { 11208 if (!other.columnValue_.isEmpty()) { 11209 if (columnValue_.isEmpty()) { 11210 columnValue_ = other.columnValue_; 11211 bitField0_ = (bitField0_ & ~0x00000004); 11212 } else { 11213 ensureColumnValueIsMutable(); 11214 columnValue_.addAll(other.columnValue_); 11215 } 11216 onChanged(); 11217 } 11218 } else { 11219 if (!other.columnValue_.isEmpty()) { 11220 if (columnValueBuilder_.isEmpty()) { 11221 columnValueBuilder_.dispose(); 11222 columnValueBuilder_ = null; 11223 columnValue_ = other.columnValue_; 11224 bitField0_ = (bitField0_ & ~0x00000004); 11225 columnValueBuilder_ = 11226 
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 11227 getColumnValueFieldBuilder() : null; 11228 } else { 11229 columnValueBuilder_.addAllMessages(other.columnValue_); 11230 } 11231 } 11232 } 11233 if (other.hasTimestamp()) { 11234 setTimestamp(other.getTimestamp()); 11235 } 11236 if (attributeBuilder_ == null) { 11237 if (!other.attribute_.isEmpty()) { 11238 if (attribute_.isEmpty()) { 11239 attribute_ = other.attribute_; 11240 bitField0_ = (bitField0_ & ~0x00000010); 11241 } else { 11242 ensureAttributeIsMutable(); 11243 attribute_.addAll(other.attribute_); 11244 } 11245 onChanged(); 11246 } 11247 } else { 11248 if (!other.attribute_.isEmpty()) { 11249 if (attributeBuilder_.isEmpty()) { 11250 attributeBuilder_.dispose(); 11251 attributeBuilder_ = null; 11252 attribute_ = other.attribute_; 11253 bitField0_ = (bitField0_ & ~0x00000010); 11254 attributeBuilder_ = 11255 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 11256 getAttributeFieldBuilder() : null; 11257 } else { 11258 attributeBuilder_.addAllMessages(other.attribute_); 11259 } 11260 } 11261 } 11262 if (other.hasDurability()) { 11263 setDurability(other.getDurability()); 11264 } 11265 if (other.hasTimeRange()) { 11266 mergeTimeRange(other.getTimeRange()); 11267 } 11268 if (other.hasAssociatedCellCount()) { 11269 setAssociatedCellCount(other.getAssociatedCellCount()); 11270 } 11271 if (other.hasNonce()) { 11272 setNonce(other.getNonce()); 11273 } 11274 this.mergeUnknownFields(other.getUnknownFields()); 11275 return this; 11276 } 11277 isInitialized()11278 public final boolean isInitialized() { 11279 for (int i = 0; i < getColumnValueCount(); i++) { 11280 if (!getColumnValue(i).isInitialized()) { 11281 11282 return false; 11283 } 11284 } 11285 for (int i = 0; i < getAttributeCount(); i++) { 11286 if (!getAttribute(i).isInitialized()) { 11287 11288 return false; 11289 } 11290 } 11291 return true; 11292 } 11293 mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)11294 public Builder mergeFrom( 11295 com.google.protobuf.CodedInputStream input, 11296 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11297 throws java.io.IOException { 11298 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto parsedMessage = null; 11299 try { 11300 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 11301 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 11302 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) e.getUnfinishedMessage(); 11303 throw e; 11304 } finally { 11305 if (parsedMessage != null) { 11306 mergeFrom(parsedMessage); 11307 } 11308 } 11309 return this; 11310 } 11311 private int bitField0_; 11312 11313 // optional bytes row = 1; 11314 private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; 11315 /** 11316 * <code>optional bytes row = 1;</code> 11317 */ hasRow()11318 public boolean hasRow() { 11319 return ((bitField0_ & 0x00000001) == 0x00000001); 11320 } 11321 /** 11322 * <code>optional bytes row = 1;</code> 11323 */ getRow()11324 public com.google.protobuf.ByteString getRow() { 11325 return row_; 11326 } 11327 /** 11328 * <code>optional bytes row = 1;</code> 11329 */ setRow(com.google.protobuf.ByteString value)11330 public Builder setRow(com.google.protobuf.ByteString value) { 11331 if (value == null) { 11332 throw new NullPointerException(); 11333 } 11334 bitField0_ |= 0x00000001; 11335 row_ = value; 11336 onChanged(); 11337 return this; 11338 } 11339 /** 11340 * <code>optional bytes row = 1;</code> 11341 */ clearRow()11342 public Builder clearRow() { 11343 bitField0_ = (bitField0_ & ~0x00000001); 11344 row_ = getDefaultInstance().getRow(); 11345 onChanged(); 11346 return this; 11347 } 11348 11349 // optional .MutationProto.MutationType mutate_type = 2; 11350 private 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; 11351 /** 11352 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 11353 */ hasMutateType()11354 public boolean hasMutateType() { 11355 return ((bitField0_ & 0x00000002) == 0x00000002); 11356 } 11357 /** 11358 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 11359 */ getMutateType()11360 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType getMutateType() { 11361 return mutateType_; 11362 } 11363 /** 11364 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 11365 */ setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value)11366 public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType value) { 11367 if (value == null) { 11368 throw new NullPointerException(); 11369 } 11370 bitField0_ |= 0x00000002; 11371 mutateType_ = value; 11372 onChanged(); 11373 return this; 11374 } 11375 /** 11376 * <code>optional .MutationProto.MutationType mutate_type = 2;</code> 11377 */ clearMutateType()11378 public Builder clearMutateType() { 11379 bitField0_ = (bitField0_ & ~0x00000002); 11380 mutateType_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType.APPEND; 11381 onChanged(); 11382 return this; 11383 } 11384 11385 // repeated .MutationProto.ColumnValue column_value = 3; 11386 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> columnValue_ = 11387 java.util.Collections.emptyList(); ensureColumnValueIsMutable()11388 private void ensureColumnValueIsMutable() { 11389 if (!((bitField0_ & 0x00000004) == 0x00000004)) { 11390 columnValue_ = new 
java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue>(columnValue_); 11391 bitField0_ |= 0x00000004; 11392 } 11393 } 11394 11395 private com.google.protobuf.RepeatedFieldBuilder< 11396 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> columnValueBuilder_; 11397 11398 /** 11399 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11400 */ getColumnValueList()11401 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> getColumnValueList() { 11402 if (columnValueBuilder_ == null) { 11403 return java.util.Collections.unmodifiableList(columnValue_); 11404 } else { 11405 return columnValueBuilder_.getMessageList(); 11406 } 11407 } 11408 /** 11409 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11410 */ getColumnValueCount()11411 public int getColumnValueCount() { 11412 if (columnValueBuilder_ == null) { 11413 return columnValue_.size(); 11414 } else { 11415 return columnValueBuilder_.getCount(); 11416 } 11417 } 11418 /** 11419 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11420 */ getColumnValue(int index)11421 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue getColumnValue(int index) { 11422 if (columnValueBuilder_ == null) { 11423 return columnValue_.get(index); 11424 } else { 11425 return columnValueBuilder_.getMessage(index); 11426 } 11427 } 11428 /** 11429 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11430 */ setColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value)11431 public Builder setColumnValue( 11432 int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { 11433 if (columnValueBuilder_ == null) { 11434 if (value == null) { 11435 throw new NullPointerException(); 11436 } 11437 ensureColumnValueIsMutable(); 11438 columnValue_.set(index, value); 11439 onChanged(); 11440 } else { 11441 columnValueBuilder_.setMessage(index, value); 11442 } 11443 return this; 11444 } 11445 /** 11446 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11447 */ setColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue)11448 public Builder setColumnValue( 11449 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { 11450 if (columnValueBuilder_ == null) { 11451 ensureColumnValueIsMutable(); 11452 columnValue_.set(index, builderForValue.build()); 11453 onChanged(); 11454 } else { 11455 columnValueBuilder_.setMessage(index, builderForValue.build()); 11456 } 11457 return this; 11458 } 11459 /** 11460 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11461 */ addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value)11462 public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { 11463 if (columnValueBuilder_ == null) { 11464 if (value == null) { 11465 throw new NullPointerException(); 11466 } 11467 ensureColumnValueIsMutable(); 11468 columnValue_.add(value); 11469 onChanged(); 11470 } else { 11471 columnValueBuilder_.addMessage(value); 11472 } 11473 return this; 11474 } 11475 /** 11476 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11477 */ addColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value)11478 public Builder addColumnValue( 11479 int index, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue value) { 11480 if (columnValueBuilder_ == null) { 11481 if (value == null) { 11482 throw new NullPointerException(); 11483 } 11484 ensureColumnValueIsMutable(); 11485 columnValue_.add(index, value); 11486 onChanged(); 11487 } else { 11488 columnValueBuilder_.addMessage(index, value); 11489 } 11490 return this; 11491 } 11492 /** 11493 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11494 */ addColumnValue( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue)11495 public Builder addColumnValue( 11496 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { 11497 if (columnValueBuilder_ == null) { 11498 ensureColumnValueIsMutable(); 11499 columnValue_.add(builderForValue.build()); 11500 onChanged(); 11501 } else { 11502 columnValueBuilder_.addMessage(builderForValue.build()); 11503 } 11504 return this; 11505 } 11506 /** 11507 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11508 */ addColumnValue( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue)11509 public Builder addColumnValue( 11510 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder builderForValue) { 11511 if (columnValueBuilder_ == null) { 11512 ensureColumnValueIsMutable(); 11513 columnValue_.add(index, builderForValue.build()); 11514 onChanged(); 11515 } else { 11516 columnValueBuilder_.addMessage(index, builderForValue.build()); 11517 } 11518 return this; 11519 } 11520 /** 11521 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11522 */ addAllColumnValue( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values)11523 public Builder addAllColumnValue( 11524 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue> values) { 11525 if (columnValueBuilder_ == null) { 11526 ensureColumnValueIsMutable(); 11527 super.addAll(values, columnValue_); 11528 onChanged(); 11529 } else { 11530 columnValueBuilder_.addAllMessages(values); 11531 } 11532 return this; 11533 } 11534 /** 11535 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11536 */ clearColumnValue()11537 public Builder clearColumnValue() { 11538 if (columnValueBuilder_ == null) { 11539 columnValue_ = java.util.Collections.emptyList(); 11540 bitField0_ = (bitField0_ & ~0x00000004); 11541 onChanged(); 11542 } else { 11543 columnValueBuilder_.clear(); 11544 } 11545 return this; 11546 } 11547 /** 11548 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11549 */ removeColumnValue(int index)11550 public Builder removeColumnValue(int index) { 11551 if (columnValueBuilder_ == null) { 11552 ensureColumnValueIsMutable(); 11553 columnValue_.remove(index); 11554 onChanged(); 11555 } else { 11556 columnValueBuilder_.remove(index); 11557 } 11558 return this; 11559 } 11560 /** 11561 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11562 */ getColumnValueBuilder( int index)11563 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder getColumnValueBuilder( 11564 int index) { 11565 return getColumnValueFieldBuilder().getBuilder(index); 11566 } 11567 /** 11568 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11569 */ getColumnValueOrBuilder( int index)11570 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder getColumnValueOrBuilder( 11571 int index) { 11572 if (columnValueBuilder_ == null) { 11573 return columnValue_.get(index); } else { 11574 return columnValueBuilder_.getMessageOrBuilder(index); 11575 } 11576 } 11577 /** 11578 * <code>repeated .MutationProto.ColumnValue column_value = 
3;</code> 11579 */ 11580 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueOrBuilderList()11581 getColumnValueOrBuilderList() { 11582 if (columnValueBuilder_ != null) { 11583 return columnValueBuilder_.getMessageOrBuilderList(); 11584 } else { 11585 return java.util.Collections.unmodifiableList(columnValue_); 11586 } 11587 } 11588 /** 11589 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11590 */ addColumnValueBuilder()11591 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder() { 11592 return getColumnValueFieldBuilder().addBuilder( 11593 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()); 11594 } 11595 /** 11596 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11597 */ addColumnValueBuilder( int index)11598 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder addColumnValueBuilder( 11599 int index) { 11600 return getColumnValueFieldBuilder().addBuilder( 11601 index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.getDefaultInstance()); 11602 } 11603 /** 11604 * <code>repeated .MutationProto.ColumnValue column_value = 3;</code> 11605 */ 11606 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder> getColumnValueBuilderList()11607 getColumnValueBuilderList() { 11608 return getColumnValueFieldBuilder().getBuilderList(); 11609 } 11610 private com.google.protobuf.RepeatedFieldBuilder< 11611 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder> getColumnValueFieldBuilder()11612 
getColumnValueFieldBuilder() { 11613 if (columnValueBuilder_ == null) { 11614 columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 11615 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.ColumnValueOrBuilder>( 11616 columnValue_, 11617 ((bitField0_ & 0x00000004) == 0x00000004), 11618 getParentForChildren(), 11619 isClean()); 11620 columnValue_ = null; 11621 } 11622 return columnValueBuilder_; 11623 } 11624 11625 // optional uint64 timestamp = 4; 11626 private long timestamp_ ; 11627 /** 11628 * <code>optional uint64 timestamp = 4;</code> 11629 */ hasTimestamp()11630 public boolean hasTimestamp() { 11631 return ((bitField0_ & 0x00000008) == 0x00000008); 11632 } 11633 /** 11634 * <code>optional uint64 timestamp = 4;</code> 11635 */ getTimestamp()11636 public long getTimestamp() { 11637 return timestamp_; 11638 } 11639 /** 11640 * <code>optional uint64 timestamp = 4;</code> 11641 */ setTimestamp(long value)11642 public Builder setTimestamp(long value) { 11643 bitField0_ |= 0x00000008; 11644 timestamp_ = value; 11645 onChanged(); 11646 return this; 11647 } 11648 /** 11649 * <code>optional uint64 timestamp = 4;</code> 11650 */ clearTimestamp()11651 public Builder clearTimestamp() { 11652 bitField0_ = (bitField0_ & ~0x00000008); 11653 timestamp_ = 0L; 11654 onChanged(); 11655 return this; 11656 } 11657 11658 // repeated .NameBytesPair attribute = 5; 11659 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ = 11660 java.util.Collections.emptyList(); ensureAttributeIsMutable()11661 private void ensureAttributeIsMutable() { 11662 if (!((bitField0_ & 0x00000010) == 0x00000010)) { 11663 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_); 11664 
bitField0_ |= 0x00000010; 11665 } 11666 } 11667 11668 private com.google.protobuf.RepeatedFieldBuilder< 11669 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; 11670 11671 /** 11672 * <code>repeated .NameBytesPair attribute = 5;</code> 11673 */ getAttributeList()11674 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { 11675 if (attributeBuilder_ == null) { 11676 return java.util.Collections.unmodifiableList(attribute_); 11677 } else { 11678 return attributeBuilder_.getMessageList(); 11679 } 11680 } 11681 /** 11682 * <code>repeated .NameBytesPair attribute = 5;</code> 11683 */ getAttributeCount()11684 public int getAttributeCount() { 11685 if (attributeBuilder_ == null) { 11686 return attribute_.size(); 11687 } else { 11688 return attributeBuilder_.getCount(); 11689 } 11690 } 11691 /** 11692 * <code>repeated .NameBytesPair attribute = 5;</code> 11693 */ getAttribute(int index)11694 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { 11695 if (attributeBuilder_ == null) { 11696 return attribute_.get(index); 11697 } else { 11698 return attributeBuilder_.getMessage(index); 11699 } 11700 } 11701 /** 11702 * <code>repeated .NameBytesPair attribute = 5;</code> 11703 */ setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)11704 public Builder setAttribute( 11705 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 11706 if (attributeBuilder_ == null) { 11707 if (value == null) { 11708 throw new NullPointerException(); 11709 } 11710 ensureAttributeIsMutable(); 11711 attribute_.set(index, value); 11712 onChanged(); 11713 } else { 11714 attributeBuilder_.setMessage(index, value); 11715 } 
11716 return this; 11717 } 11718 /** 11719 * <code>repeated .NameBytesPair attribute = 5;</code> 11720 */ setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)11721 public Builder setAttribute( 11722 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 11723 if (attributeBuilder_ == null) { 11724 ensureAttributeIsMutable(); 11725 attribute_.set(index, builderForValue.build()); 11726 onChanged(); 11727 } else { 11728 attributeBuilder_.setMessage(index, builderForValue.build()); 11729 } 11730 return this; 11731 } 11732 /** 11733 * <code>repeated .NameBytesPair attribute = 5;</code> 11734 */ addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)11735 public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 11736 if (attributeBuilder_ == null) { 11737 if (value == null) { 11738 throw new NullPointerException(); 11739 } 11740 ensureAttributeIsMutable(); 11741 attribute_.add(value); 11742 onChanged(); 11743 } else { 11744 attributeBuilder_.addMessage(value); 11745 } 11746 return this; 11747 } 11748 /** 11749 * <code>repeated .NameBytesPair attribute = 5;</code> 11750 */ addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)11751 public Builder addAttribute( 11752 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 11753 if (attributeBuilder_ == null) { 11754 if (value == null) { 11755 throw new NullPointerException(); 11756 } 11757 ensureAttributeIsMutable(); 11758 attribute_.add(index, value); 11759 onChanged(); 11760 } else { 11761 attributeBuilder_.addMessage(index, value); 11762 } 11763 return this; 11764 } 11765 /** 11766 * <code>repeated .NameBytesPair attribute = 5;</code> 11767 */ addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder 
builderForValue)11768 public Builder addAttribute( 11769 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 11770 if (attributeBuilder_ == null) { 11771 ensureAttributeIsMutable(); 11772 attribute_.add(builderForValue.build()); 11773 onChanged(); 11774 } else { 11775 attributeBuilder_.addMessage(builderForValue.build()); 11776 } 11777 return this; 11778 } 11779 /** 11780 * <code>repeated .NameBytesPair attribute = 5;</code> 11781 */ addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)11782 public Builder addAttribute( 11783 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 11784 if (attributeBuilder_ == null) { 11785 ensureAttributeIsMutable(); 11786 attribute_.add(index, builderForValue.build()); 11787 onChanged(); 11788 } else { 11789 attributeBuilder_.addMessage(index, builderForValue.build()); 11790 } 11791 return this; 11792 } 11793 /** 11794 * <code>repeated .NameBytesPair attribute = 5;</code> 11795 */ addAllAttribute( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values)11796 public Builder addAllAttribute( 11797 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) { 11798 if (attributeBuilder_ == null) { 11799 ensureAttributeIsMutable(); 11800 super.addAll(values, attribute_); 11801 onChanged(); 11802 } else { 11803 attributeBuilder_.addAllMessages(values); 11804 } 11805 return this; 11806 } 11807 /** 11808 * <code>repeated .NameBytesPair attribute = 5;</code> 11809 */ clearAttribute()11810 public Builder clearAttribute() { 11811 if (attributeBuilder_ == null) { 11812 attribute_ = java.util.Collections.emptyList(); 11813 bitField0_ = (bitField0_ & ~0x00000010); 11814 onChanged(); 11815 } else { 11816 attributeBuilder_.clear(); 11817 } 11818 return this; 11819 } 11820 /** 11821 * <code>repeated .NameBytesPair attribute = 5;</code> 11822 */ removeAttribute(int index)11823 public Builder removeAttribute(int index) { 11824 if (attributeBuilder_ == null) { 11825 ensureAttributeIsMutable(); 11826 attribute_.remove(index); 11827 onChanged(); 11828 } else { 11829 attributeBuilder_.remove(index); 11830 } 11831 return this; 11832 } 11833 /** 11834 * <code>repeated .NameBytesPair attribute = 5;</code> 11835 */ getAttributeBuilder( int index)11836 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( 11837 int index) { 11838 return getAttributeFieldBuilder().getBuilder(index); 11839 } 11840 /** 11841 * <code>repeated .NameBytesPair attribute = 5;</code> 11842 */ getAttributeOrBuilder( int index)11843 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 11844 int index) { 11845 if (attributeBuilder_ == null) { 11846 return attribute_.get(index); } else { 11847 return attributeBuilder_.getMessageOrBuilder(index); 11848 } 11849 } 11850 /** 11851 * <code>repeated .NameBytesPair attribute = 5;</code> 11852 */ 11853 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()11854 getAttributeOrBuilderList() { 11855 if (attributeBuilder_ != null) { 11856 return attributeBuilder_.getMessageOrBuilderList(); 11857 } else { 11858 return java.util.Collections.unmodifiableList(attribute_); 11859 } 11860 } 11861 /** 11862 * <code>repeated .NameBytesPair attribute = 5;</code> 11863 */ addAttributeBuilder()11864 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { 11865 return getAttributeFieldBuilder().addBuilder( 11866 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); 11867 } 11868 /** 11869 * <code>repeated .NameBytesPair attribute = 5;</code> 11870 */ addAttributeBuilder( int index)11871 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( 11872 int index) { 11873 return getAttributeFieldBuilder().addBuilder( 11874 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); 11875 } 11876 /** 11877 * <code>repeated .NameBytesPair attribute = 5;</code> 11878 */ 11879 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> getAttributeBuilderList()11880 getAttributeBuilderList() { 11881 return getAttributeFieldBuilder().getBuilderList(); 11882 } 11883 private com.google.protobuf.RepeatedFieldBuilder< 11884 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder()11885 getAttributeFieldBuilder() { 11886 if (attributeBuilder_ == null) { 11887 attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 11888 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( 11889 attribute_, 11890 ((bitField0_ & 0x00000010) == 0x00000010), 11891 getParentForChildren(), 11892 isClean()); 11893 attribute_ = null; 11894 } 11895 return attributeBuilder_; 11896 } 11897 11898 // optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT]; 11899 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability durability_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; 11900 /** 11901 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 11902 */ hasDurability()11903 public boolean hasDurability() { 11904 return ((bitField0_ & 0x00000020) == 0x00000020); 11905 } 11906 /** 11907 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 11908 */ getDurability()11909 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability getDurability() { 11910 return durability_; 11911 } 11912 /** 11913 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 11914 */ setDurability(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value)11915 public Builder setDurability(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability value) { 11916 if (value == null) { 11917 throw new NullPointerException(); 11918 } 11919 bitField0_ |= 0x00000020; 11920 durability_ = value; 11921 onChanged(); 11922 return this; 11923 } 11924 /** 11925 * <code>optional .MutationProto.Durability durability = 6 [default = USE_DEFAULT];</code> 11926 */ clearDurability()11927 public Builder clearDurability() { 11928 bitField0_ = (bitField0_ & ~0x00000020); 11929 durability_ = 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Durability.USE_DEFAULT; 11930 onChanged(); 11931 return this; 11932 } 11933 11934 // optional .TimeRange time_range = 7; 11935 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 11936 private com.google.protobuf.SingleFieldBuilder< 11937 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; 11938 /** 11939 * <code>optional .TimeRange time_range = 7;</code> 11940 * 11941 * <pre> 11942 * For some mutations, a result may be returned, in which case, 11943 * time range can be specified for potential performance gain 11944 * </pre> 11945 */ hasTimeRange()11946 public boolean hasTimeRange() { 11947 return ((bitField0_ & 0x00000040) == 0x00000040); 11948 } 11949 /** 11950 * <code>optional .TimeRange time_range = 7;</code> 11951 * 11952 * <pre> 11953 * For some mutations, a result may be returned, in which case, 11954 * time range can be specified for potential performance gain 11955 * </pre> 11956 */ getTimeRange()11957 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 11958 if (timeRangeBuilder_ == null) { 11959 return timeRange_; 11960 } else { 11961 return timeRangeBuilder_.getMessage(); 11962 } 11963 } 11964 /** 11965 * <code>optional .TimeRange time_range = 7;</code> 11966 * 11967 * <pre> 11968 * For some mutations, a result may be returned, in which case, 11969 * time range can be specified for potential performance gain 11970 * </pre> 11971 */ setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)11972 public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 11973 if (timeRangeBuilder_ == 
null) { 11974 if (value == null) { 11975 throw new NullPointerException(); 11976 } 11977 timeRange_ = value; 11978 onChanged(); 11979 } else { 11980 timeRangeBuilder_.setMessage(value); 11981 } 11982 bitField0_ |= 0x00000040; 11983 return this; 11984 } 11985 /** 11986 * <code>optional .TimeRange time_range = 7;</code> 11987 * 11988 * <pre> 11989 * For some mutations, a result may be returned, in which case, 11990 * time range can be specified for potential performance gain 11991 * </pre> 11992 */ setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue)11993 public Builder setTimeRange( 11994 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { 11995 if (timeRangeBuilder_ == null) { 11996 timeRange_ = builderForValue.build(); 11997 onChanged(); 11998 } else { 11999 timeRangeBuilder_.setMessage(builderForValue.build()); 12000 } 12001 bitField0_ |= 0x00000040; 12002 return this; 12003 } 12004 /** 12005 * <code>optional .TimeRange time_range = 7;</code> 12006 * 12007 * <pre> 12008 * For some mutations, a result may be returned, in which case, 12009 * time range can be specified for potential performance gain 12010 * </pre> 12011 */ mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)12012 public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 12013 if (timeRangeBuilder_ == null) { 12014 if (((bitField0_ & 0x00000040) == 0x00000040) && 12015 timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { 12016 timeRange_ = 12017 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); 12018 } else { 12019 timeRange_ = value; 12020 } 12021 onChanged(); 12022 } else { 12023 timeRangeBuilder_.mergeFrom(value); 12024 } 12025 bitField0_ |= 0x00000040; 12026 return this; 12027 } 12028 /** 12029 * 
<code>optional .TimeRange time_range = 7;</code> 12030 * 12031 * <pre> 12032 * For some mutations, a result may be returned, in which case, 12033 * time range can be specified for potential performance gain 12034 * </pre> 12035 */ clearTimeRange()12036 public Builder clearTimeRange() { 12037 if (timeRangeBuilder_ == null) { 12038 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 12039 onChanged(); 12040 } else { 12041 timeRangeBuilder_.clear(); 12042 } 12043 bitField0_ = (bitField0_ & ~0x00000040); 12044 return this; 12045 } 12046 /** 12047 * <code>optional .TimeRange time_range = 7;</code> 12048 * 12049 * <pre> 12050 * For some mutations, a result may be returned, in which case, 12051 * time range can be specified for potential performance gain 12052 * </pre> 12053 */ getTimeRangeBuilder()12054 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { 12055 bitField0_ |= 0x00000040; 12056 onChanged(); 12057 return getTimeRangeFieldBuilder().getBuilder(); 12058 } 12059 /** 12060 * <code>optional .TimeRange time_range = 7;</code> 12061 * 12062 * <pre> 12063 * For some mutations, a result may be returned, in which case, 12064 * time range can be specified for potential performance gain 12065 * </pre> 12066 */ getTimeRangeOrBuilder()12067 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 12068 if (timeRangeBuilder_ != null) { 12069 return timeRangeBuilder_.getMessageOrBuilder(); 12070 } else { 12071 return timeRange_; 12072 } 12073 } 12074 /** 12075 * <code>optional .TimeRange time_range = 7;</code> 12076 * 12077 * <pre> 12078 * For some mutations, a result may be returned, in which case, 12079 * time range can be specified for potential performance gain 12080 * </pre> 12081 */ 12082 private com.google.protobuf.SingleFieldBuilder< 12083 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder()12084 getTimeRangeFieldBuilder() { 12085 if (timeRangeBuilder_ == null) { 12086 timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< 12087 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( 12088 timeRange_, 12089 getParentForChildren(), 12090 isClean()); 12091 timeRange_ = null; 12092 } 12093 return timeRangeBuilder_; 12094 } 12095 12096 // optional int32 associated_cell_count = 8; 12097 private int associatedCellCount_ ; 12098 /** 12099 * <code>optional int32 associated_cell_count = 8;</code> 12100 * 12101 * <pre> 12102 * The below count is set when the associated cells are NOT 12103 * part of this protobuf message; they are passed alongside 12104 * and then this Message is a placeholder with metadata. The 12105 * count is needed to know how many to peel off the block of Cells as 12106 * ours. NOTE: This is different from the pb managed cell_count of the 12107 * 'cell' field above which is non-null when the cells are pb'd. 12108 * </pre> 12109 */ hasAssociatedCellCount()12110 public boolean hasAssociatedCellCount() { 12111 return ((bitField0_ & 0x00000080) == 0x00000080); 12112 } 12113 /** 12114 * <code>optional int32 associated_cell_count = 8;</code> 12115 * 12116 * <pre> 12117 * The below count is set when the associated cells are NOT 12118 * part of this protobuf message; they are passed alongside 12119 * and then this Message is a placeholder with metadata. The 12120 * count is needed to know how many to peel off the block of Cells as 12121 * ours. NOTE: This is different from the pb managed cell_count of the 12122 * 'cell' field above which is non-null when the cells are pb'd. 
12123 * </pre> 12124 */ getAssociatedCellCount()12125 public int getAssociatedCellCount() { 12126 return associatedCellCount_; 12127 } 12128 /** 12129 * <code>optional int32 associated_cell_count = 8;</code> 12130 * 12131 * <pre> 12132 * The below count is set when the associated cells are NOT 12133 * part of this protobuf message; they are passed alongside 12134 * and then this Message is a placeholder with metadata. The 12135 * count is needed to know how many to peel off the block of Cells as 12136 * ours. NOTE: This is different from the pb managed cell_count of the 12137 * 'cell' field above which is non-null when the cells are pb'd. 12138 * </pre> 12139 */ setAssociatedCellCount(int value)12140 public Builder setAssociatedCellCount(int value) { 12141 bitField0_ |= 0x00000080; 12142 associatedCellCount_ = value; 12143 onChanged(); 12144 return this; 12145 } 12146 /** 12147 * <code>optional int32 associated_cell_count = 8;</code> 12148 * 12149 * <pre> 12150 * The below count is set when the associated cells are NOT 12151 * part of this protobuf message; they are passed alongside 12152 * and then this Message is a placeholder with metadata. The 12153 * count is needed to know how many to peel off the block of Cells as 12154 * ours. NOTE: This is different from the pb managed cell_count of the 12155 * 'cell' field above which is non-null when the cells are pb'd. 
12156 * </pre> 12157 */ clearAssociatedCellCount()12158 public Builder clearAssociatedCellCount() { 12159 bitField0_ = (bitField0_ & ~0x00000080); 12160 associatedCellCount_ = 0; 12161 onChanged(); 12162 return this; 12163 } 12164 12165 // optional uint64 nonce = 9; 12166 private long nonce_ ; 12167 /** 12168 * <code>optional uint64 nonce = 9;</code> 12169 */ hasNonce()12170 public boolean hasNonce() { 12171 return ((bitField0_ & 0x00000100) == 0x00000100); 12172 } 12173 /** 12174 * <code>optional uint64 nonce = 9;</code> 12175 */ getNonce()12176 public long getNonce() { 12177 return nonce_; 12178 } 12179 /** 12180 * <code>optional uint64 nonce = 9;</code> 12181 */ setNonce(long value)12182 public Builder setNonce(long value) { 12183 bitField0_ |= 0x00000100; 12184 nonce_ = value; 12185 onChanged(); 12186 return this; 12187 } 12188 /** 12189 * <code>optional uint64 nonce = 9;</code> 12190 */ clearNonce()12191 public Builder clearNonce() { 12192 bitField0_ = (bitField0_ & ~0x00000100); 12193 nonce_ = 0L; 12194 onChanged(); 12195 return this; 12196 } 12197 12198 // @@protoc_insertion_point(builder_scope:MutationProto) 12199 } 12200 12201 static { 12202 defaultInstance = new MutationProto(true); defaultInstance.initFields()12203 defaultInstance.initFields(); 12204 } 12205 12206 // @@protoc_insertion_point(class_scope:MutationProto) 12207 } 12208 12209 public interface MutateRequestOrBuilder 12210 extends com.google.protobuf.MessageOrBuilder { 12211 12212 // required .RegionSpecifier region = 1; 12213 /** 12214 * <code>required .RegionSpecifier region = 1;</code> 12215 */ hasRegion()12216 boolean hasRegion(); 12217 /** 12218 * <code>required .RegionSpecifier region = 1;</code> 12219 */ getRegion()12220 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); 12221 /** 12222 * <code>required .RegionSpecifier region = 1;</code> 12223 */ getRegionOrBuilder()12224 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder 
getRegionOrBuilder(); 12225 12226 // required .MutationProto mutation = 2; 12227 /** 12228 * <code>required .MutationProto mutation = 2;</code> 12229 */ hasMutation()12230 boolean hasMutation(); 12231 /** 12232 * <code>required .MutationProto mutation = 2;</code> 12233 */ getMutation()12234 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation(); 12235 /** 12236 * <code>required .MutationProto mutation = 2;</code> 12237 */ getMutationOrBuilder()12238 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder(); 12239 12240 // optional .Condition condition = 3; 12241 /** 12242 * <code>optional .Condition condition = 3;</code> 12243 */ hasCondition()12244 boolean hasCondition(); 12245 /** 12246 * <code>optional .Condition condition = 3;</code> 12247 */ getCondition()12248 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition(); 12249 /** 12250 * <code>optional .Condition condition = 3;</code> 12251 */ getConditionOrBuilder()12252 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder(); 12253 12254 // optional uint64 nonce_group = 4; 12255 /** 12256 * <code>optional uint64 nonce_group = 4;</code> 12257 */ hasNonceGroup()12258 boolean hasNonceGroup(); 12259 /** 12260 * <code>optional uint64 nonce_group = 4;</code> 12261 */ getNonceGroup()12262 long getNonceGroup(); 12263 } 12264 /** 12265 * Protobuf type {@code MutateRequest} 12266 * 12267 * <pre> 12268 ** 12269 * The mutate request. Perform a single Mutate operation. 12270 * 12271 * Optionally, you can specify a condition. The mutate 12272 * will take place only if the condition is met. Otherwise, 12273 * the mutate will be ignored. In the response result, 12274 * parameter processed is used to indicate if the mutate 12275 * actually happened. 
12276 * </pre> 12277 */ 12278 public static final class MutateRequest extends 12279 com.google.protobuf.GeneratedMessage 12280 implements MutateRequestOrBuilder { 12281 // Use MutateRequest.newBuilder() to construct. MutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)12282 private MutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 12283 super(builder); 12284 this.unknownFields = builder.getUnknownFields(); 12285 } MutateRequest(boolean noInit)12286 private MutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 12287 12288 private static final MutateRequest defaultInstance; getDefaultInstance()12289 public static MutateRequest getDefaultInstance() { 12290 return defaultInstance; 12291 } 12292 getDefaultInstanceForType()12293 public MutateRequest getDefaultInstanceForType() { 12294 return defaultInstance; 12295 } 12296 12297 private final com.google.protobuf.UnknownFieldSet unknownFields; 12298 @java.lang.Override 12299 public final com.google.protobuf.UnknownFieldSet getUnknownFields()12300 getUnknownFields() { 12301 return this.unknownFields; 12302 } MutateRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12303 private MutateRequest( 12304 com.google.protobuf.CodedInputStream input, 12305 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12306 throws com.google.protobuf.InvalidProtocolBufferException { 12307 initFields(); 12308 int mutable_bitField0_ = 0; 12309 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 12310 com.google.protobuf.UnknownFieldSet.newBuilder(); 12311 try { 12312 boolean done = false; 12313 while (!done) { 12314 int tag = input.readTag(); 12315 switch (tag) { 12316 case 0: 12317 done = true; 12318 break; 12319 default: { 12320 if (!parseUnknownField(input, unknownFields, 12321 extensionRegistry, tag)) { 12322 done = true; 12323 } 12324 break; 12325 } 12326 case 10: 
{ 12327 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; 12328 if (((bitField0_ & 0x00000001) == 0x00000001)) { 12329 subBuilder = region_.toBuilder(); 12330 } 12331 region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); 12332 if (subBuilder != null) { 12333 subBuilder.mergeFrom(region_); 12334 region_ = subBuilder.buildPartial(); 12335 } 12336 bitField0_ |= 0x00000001; 12337 break; 12338 } 12339 case 18: { 12340 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = null; 12341 if (((bitField0_ & 0x00000002) == 0x00000002)) { 12342 subBuilder = mutation_.toBuilder(); 12343 } 12344 mutation_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry); 12345 if (subBuilder != null) { 12346 subBuilder.mergeFrom(mutation_); 12347 mutation_ = subBuilder.buildPartial(); 12348 } 12349 bitField0_ |= 0x00000002; 12350 break; 12351 } 12352 case 26: { 12353 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder subBuilder = null; 12354 if (((bitField0_ & 0x00000004) == 0x00000004)) { 12355 subBuilder = condition_.toBuilder(); 12356 } 12357 condition_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.PARSER, extensionRegistry); 12358 if (subBuilder != null) { 12359 subBuilder.mergeFrom(condition_); 12360 condition_ = subBuilder.buildPartial(); 12361 } 12362 bitField0_ |= 0x00000004; 12363 break; 12364 } 12365 case 32: { 12366 bitField0_ |= 0x00000008; 12367 nonceGroup_ = input.readUInt64(); 12368 break; 12369 } 12370 } 12371 } 12372 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 12373 throw e.setUnfinishedMessage(this); 12374 } catch (java.io.IOException e) { 12375 throw new com.google.protobuf.InvalidProtocolBufferException( 12376 e.getMessage()).setUnfinishedMessage(this); 12377 } finally { 
12378 this.unknownFields = unknownFields.build(); 12379 makeExtensionsImmutable(); 12380 } 12381 } 12382 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()12383 getDescriptor() { 12384 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; 12385 } 12386 12387 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()12388 internalGetFieldAccessorTable() { 12389 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable 12390 .ensureFieldAccessorsInitialized( 12391 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); 12392 } 12393 12394 public static com.google.protobuf.Parser<MutateRequest> PARSER = 12395 new com.google.protobuf.AbstractParser<MutateRequest>() { 12396 public MutateRequest parsePartialFrom( 12397 com.google.protobuf.CodedInputStream input, 12398 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12399 throws com.google.protobuf.InvalidProtocolBufferException { 12400 return new MutateRequest(input, extensionRegistry); 12401 } 12402 }; 12403 12404 @java.lang.Override getParserForType()12405 public com.google.protobuf.Parser<MutateRequest> getParserForType() { 12406 return PARSER; 12407 } 12408 12409 private int bitField0_; 12410 // required .RegionSpecifier region = 1; 12411 public static final int REGION_FIELD_NUMBER = 1; 12412 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; 12413 /** 12414 * <code>required .RegionSpecifier region = 1;</code> 12415 */ hasRegion()12416 public boolean hasRegion() { 12417 return ((bitField0_ & 0x00000001) == 0x00000001); 12418 } 12419 /** 12420 * <code>required .RegionSpecifier region = 1;</code> 12421 */ getRegion()12422 public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 12423 return region_; 12424 } 12425 /** 12426 * <code>required .RegionSpecifier region = 1;</code> 12427 */ getRegionOrBuilder()12428 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 12429 return region_; 12430 } 12431 12432 // required .MutationProto mutation = 2; 12433 public static final int MUTATION_FIELD_NUMBER = 2; 12434 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_; 12435 /** 12436 * <code>required .MutationProto mutation = 2;</code> 12437 */ hasMutation()12438 public boolean hasMutation() { 12439 return ((bitField0_ & 0x00000002) == 0x00000002); 12440 } 12441 /** 12442 * <code>required .MutationProto mutation = 2;</code> 12443 */ getMutation()12444 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { 12445 return mutation_; 12446 } 12447 /** 12448 * <code>required .MutationProto mutation = 2;</code> 12449 */ getMutationOrBuilder()12450 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { 12451 return mutation_; 12452 } 12453 12454 // optional .Condition condition = 3; 12455 public static final int CONDITION_FIELD_NUMBER = 3; 12456 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_; 12457 /** 12458 * <code>optional .Condition condition = 3;</code> 12459 */ hasCondition()12460 public boolean hasCondition() { 12461 return ((bitField0_ & 0x00000004) == 0x00000004); 12462 } 12463 /** 12464 * <code>optional .Condition condition = 3;</code> 12465 */ getCondition()12466 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { 12467 return condition_; 12468 } 12469 /** 12470 * <code>optional .Condition condition = 3;</code> 12471 */ getConditionOrBuilder()12472 public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { 12473 return condition_; 12474 } 12475 12476 // optional uint64 nonce_group = 4; 12477 public static final int NONCE_GROUP_FIELD_NUMBER = 4; 12478 private long nonceGroup_; 12479 /** 12480 * <code>optional uint64 nonce_group = 4;</code> 12481 */ hasNonceGroup()12482 public boolean hasNonceGroup() { 12483 return ((bitField0_ & 0x00000008) == 0x00000008); 12484 } 12485 /** 12486 * <code>optional uint64 nonce_group = 4;</code> 12487 */ getNonceGroup()12488 public long getNonceGroup() { 12489 return nonceGroup_; 12490 } 12491 initFields()12492 private void initFields() { 12493 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 12494 mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); 12495 condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); 12496 nonceGroup_ = 0L; 12497 } 12498 private byte memoizedIsInitialized = -1; isInitialized()12499 public final boolean isInitialized() { 12500 byte isInitialized = memoizedIsInitialized; 12501 if (isInitialized != -1) return isInitialized == 1; 12502 12503 if (!hasRegion()) { 12504 memoizedIsInitialized = 0; 12505 return false; 12506 } 12507 if (!hasMutation()) { 12508 memoizedIsInitialized = 0; 12509 return false; 12510 } 12511 if (!getRegion().isInitialized()) { 12512 memoizedIsInitialized = 0; 12513 return false; 12514 } 12515 if (!getMutation().isInitialized()) { 12516 memoizedIsInitialized = 0; 12517 return false; 12518 } 12519 if (hasCondition()) { 12520 if (!getCondition().isInitialized()) { 12521 memoizedIsInitialized = 0; 12522 return false; 12523 } 12524 } 12525 memoizedIsInitialized = 1; 12526 return true; 12527 } 12528 writeTo(com.google.protobuf.CodedOutputStream output)12529 public void writeTo(com.google.protobuf.CodedOutputStream output) 12530 throws 
java.io.IOException { 12531 getSerializedSize(); 12532 if (((bitField0_ & 0x00000001) == 0x00000001)) { 12533 output.writeMessage(1, region_); 12534 } 12535 if (((bitField0_ & 0x00000002) == 0x00000002)) { 12536 output.writeMessage(2, mutation_); 12537 } 12538 if (((bitField0_ & 0x00000004) == 0x00000004)) { 12539 output.writeMessage(3, condition_); 12540 } 12541 if (((bitField0_ & 0x00000008) == 0x00000008)) { 12542 output.writeUInt64(4, nonceGroup_); 12543 } 12544 getUnknownFields().writeTo(output); 12545 } 12546 12547 private int memoizedSerializedSize = -1; getSerializedSize()12548 public int getSerializedSize() { 12549 int size = memoizedSerializedSize; 12550 if (size != -1) return size; 12551 12552 size = 0; 12553 if (((bitField0_ & 0x00000001) == 0x00000001)) { 12554 size += com.google.protobuf.CodedOutputStream 12555 .computeMessageSize(1, region_); 12556 } 12557 if (((bitField0_ & 0x00000002) == 0x00000002)) { 12558 size += com.google.protobuf.CodedOutputStream 12559 .computeMessageSize(2, mutation_); 12560 } 12561 if (((bitField0_ & 0x00000004) == 0x00000004)) { 12562 size += com.google.protobuf.CodedOutputStream 12563 .computeMessageSize(3, condition_); 12564 } 12565 if (((bitField0_ & 0x00000008) == 0x00000008)) { 12566 size += com.google.protobuf.CodedOutputStream 12567 .computeUInt64Size(4, nonceGroup_); 12568 } 12569 size += getUnknownFields().getSerializedSize(); 12570 memoizedSerializedSize = size; 12571 return size; 12572 } 12573 12574 private static final long serialVersionUID = 0L; 12575 @java.lang.Override writeReplace()12576 protected java.lang.Object writeReplace() 12577 throws java.io.ObjectStreamException { 12578 return super.writeReplace(); 12579 } 12580 12581 @java.lang.Override equals(final java.lang.Object obj)12582 public boolean equals(final java.lang.Object obj) { 12583 if (obj == this) { 12584 return true; 12585 } 12586 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)) { 12587 return 
super.equals(obj); 12588 } 12589 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) obj; 12590 12591 boolean result = true; 12592 result = result && (hasRegion() == other.hasRegion()); 12593 if (hasRegion()) { 12594 result = result && getRegion() 12595 .equals(other.getRegion()); 12596 } 12597 result = result && (hasMutation() == other.hasMutation()); 12598 if (hasMutation()) { 12599 result = result && getMutation() 12600 .equals(other.getMutation()); 12601 } 12602 result = result && (hasCondition() == other.hasCondition()); 12603 if (hasCondition()) { 12604 result = result && getCondition() 12605 .equals(other.getCondition()); 12606 } 12607 result = result && (hasNonceGroup() == other.hasNonceGroup()); 12608 if (hasNonceGroup()) { 12609 result = result && (getNonceGroup() 12610 == other.getNonceGroup()); 12611 } 12612 result = result && 12613 getUnknownFields().equals(other.getUnknownFields()); 12614 return result; 12615 } 12616 12617 private int memoizedHashCode = 0; 12618 @java.lang.Override hashCode()12619 public int hashCode() { 12620 if (memoizedHashCode != 0) { 12621 return memoizedHashCode; 12622 } 12623 int hash = 41; 12624 hash = (19 * hash) + getDescriptorForType().hashCode(); 12625 if (hasRegion()) { 12626 hash = (37 * hash) + REGION_FIELD_NUMBER; 12627 hash = (53 * hash) + getRegion().hashCode(); 12628 } 12629 if (hasMutation()) { 12630 hash = (37 * hash) + MUTATION_FIELD_NUMBER; 12631 hash = (53 * hash) + getMutation().hashCode(); 12632 } 12633 if (hasCondition()) { 12634 hash = (37 * hash) + CONDITION_FIELD_NUMBER; 12635 hash = (53 * hash) + getCondition().hashCode(); 12636 } 12637 if (hasNonceGroup()) { 12638 hash = (37 * hash) + NONCE_GROUP_FIELD_NUMBER; 12639 hash = (53 * hash) + hashLong(getNonceGroup()); 12640 } 12641 hash = (29 * hash) + getUnknownFields().hashCode(); 12642 memoizedHashCode = hash; 12643 return hash; 12644 } 12645 parseFrom( 
com.google.protobuf.ByteString data)12646 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( 12647 com.google.protobuf.ByteString data) 12648 throws com.google.protobuf.InvalidProtocolBufferException { 12649 return PARSER.parseFrom(data); 12650 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12651 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( 12652 com.google.protobuf.ByteString data, 12653 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12654 throws com.google.protobuf.InvalidProtocolBufferException { 12655 return PARSER.parseFrom(data, extensionRegistry); 12656 } parseFrom(byte[] data)12657 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(byte[] data) 12658 throws com.google.protobuf.InvalidProtocolBufferException { 12659 return PARSER.parseFrom(data); 12660 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12661 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( 12662 byte[] data, 12663 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12664 throws com.google.protobuf.InvalidProtocolBufferException { 12665 return PARSER.parseFrom(data, extensionRegistry); 12666 } parseFrom(java.io.InputStream input)12667 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom(java.io.InputStream input) 12668 throws java.io.IOException { 12669 return PARSER.parseFrom(input); 12670 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12671 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( 12672 java.io.InputStream input, 12673 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12674 throws java.io.IOException { 12675 return 
PARSER.parseFrom(input, extensionRegistry); 12676 } parseDelimitedFrom(java.io.InputStream input)12677 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) 12678 throws java.io.IOException { 12679 return PARSER.parseDelimitedFrom(input); 12680 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12681 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseDelimitedFrom( 12682 java.io.InputStream input, 12683 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12684 throws java.io.IOException { 12685 return PARSER.parseDelimitedFrom(input, extensionRegistry); 12686 } parseFrom( com.google.protobuf.CodedInputStream input)12687 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( 12688 com.google.protobuf.CodedInputStream input) 12689 throws java.io.IOException { 12690 return PARSER.parseFrom(input); 12691 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12692 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parseFrom( 12693 com.google.protobuf.CodedInputStream input, 12694 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12695 throws java.io.IOException { 12696 return PARSER.parseFrom(input, extensionRegistry); 12697 } 12698 newBuilder()12699 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()12700 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype)12701 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest prototype) { 12702 return newBuilder().mergeFrom(prototype); 12703 } toBuilder()12704 public Builder toBuilder() { return newBuilder(this); } 12705 12706 
@java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)12707 protected Builder newBuilderForType( 12708 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 12709 Builder builder = new Builder(parent); 12710 return builder; 12711 } 12712 /** 12713 * Protobuf type {@code MutateRequest} 12714 * 12715 * <pre> 12716 ** 12717 * The mutate request. Perform a single Mutate operation. 12718 * 12719 * Optionally, you can specify a condition. The mutate 12720 * will take place only if the condition is met. Otherwise, 12721 * the mutate will be ignored. In the response result, 12722 * parameter processed is used to indicate if the mutate 12723 * actually happened. 12724 * </pre> 12725 */ 12726 public static final class Builder extends 12727 com.google.protobuf.GeneratedMessage.Builder<Builder> 12728 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequestOrBuilder { 12729 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()12730 getDescriptor() { 12731 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; 12732 } 12733 12734 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()12735 internalGetFieldAccessorTable() { 12736 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_fieldAccessorTable 12737 .ensureFieldAccessorsInitialized( 12738 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.Builder.class); 12739 } 12740 12741 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.newBuilder() Builder()12742 private Builder() { 12743 maybeForceBuilderInitialization(); 12744 } 12745 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)12746 private Builder( 12747 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { 12748 super(parent); 12749 maybeForceBuilderInitialization(); 12750 } maybeForceBuilderInitialization()12751 private void maybeForceBuilderInitialization() { 12752 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 12753 getRegionFieldBuilder(); 12754 getMutationFieldBuilder(); 12755 getConditionFieldBuilder(); 12756 } 12757 } create()12758 private static Builder create() { 12759 return new Builder(); 12760 } 12761 clear()12762 public Builder clear() { 12763 super.clear(); 12764 if (regionBuilder_ == null) { 12765 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 12766 } else { 12767 regionBuilder_.clear(); 12768 } 12769 bitField0_ = (bitField0_ & ~0x00000001); 12770 if (mutationBuilder_ == null) { 12771 mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); 12772 } else { 12773 mutationBuilder_.clear(); 12774 } 12775 bitField0_ = (bitField0_ & ~0x00000002); 12776 if (conditionBuilder_ == null) { 12777 condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); 12778 } else { 12779 conditionBuilder_.clear(); 12780 } 12781 bitField0_ = (bitField0_ & ~0x00000004); 12782 nonceGroup_ = 0L; 12783 bitField0_ = (bitField0_ & ~0x00000008); 12784 return this; 12785 } 12786 clone()12787 public Builder clone() { 12788 return create().mergeFrom(buildPartial()); 12789 } 12790 12791 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()12792 getDescriptorForType() { 12793 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateRequest_descriptor; 12794 } 12795 getDefaultInstanceForType()12796 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest getDefaultInstanceForType() { 12797 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance(); 12798 } 12799 
build()12800 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest build() { 12801 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = buildPartial(); 12802 if (!result.isInitialized()) { 12803 throw newUninitializedMessageException(result); 12804 } 12805 return result; 12806 } 12807 buildPartial()12808 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest buildPartial() { 12809 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest(this); 12810 int from_bitField0_ = bitField0_; 12811 int to_bitField0_ = 0; 12812 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 12813 to_bitField0_ |= 0x00000001; 12814 } 12815 if (regionBuilder_ == null) { 12816 result.region_ = region_; 12817 } else { 12818 result.region_ = regionBuilder_.build(); 12819 } 12820 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 12821 to_bitField0_ |= 0x00000002; 12822 } 12823 if (mutationBuilder_ == null) { 12824 result.mutation_ = mutation_; 12825 } else { 12826 result.mutation_ = mutationBuilder_.build(); 12827 } 12828 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 12829 to_bitField0_ |= 0x00000004; 12830 } 12831 if (conditionBuilder_ == null) { 12832 result.condition_ = condition_; 12833 } else { 12834 result.condition_ = conditionBuilder_.build(); 12835 } 12836 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 12837 to_bitField0_ |= 0x00000008; 12838 } 12839 result.nonceGroup_ = nonceGroup_; 12840 result.bitField0_ = to_bitField0_; 12841 onBuilt(); 12842 return result; 12843 } 12844 mergeFrom(com.google.protobuf.Message other)12845 public Builder mergeFrom(com.google.protobuf.Message other) { 12846 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) { 12847 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest)other); 12848 } else { 
12849 super.mergeFrom(other); 12850 return this; 12851 } 12852 } 12853 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other)12854 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest other) { 12855 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest.getDefaultInstance()) return this; 12856 if (other.hasRegion()) { 12857 mergeRegion(other.getRegion()); 12858 } 12859 if (other.hasMutation()) { 12860 mergeMutation(other.getMutation()); 12861 } 12862 if (other.hasCondition()) { 12863 mergeCondition(other.getCondition()); 12864 } 12865 if (other.hasNonceGroup()) { 12866 setNonceGroup(other.getNonceGroup()); 12867 } 12868 this.mergeUnknownFields(other.getUnknownFields()); 12869 return this; 12870 } 12871 isInitialized()12872 public final boolean isInitialized() { 12873 if (!hasRegion()) { 12874 12875 return false; 12876 } 12877 if (!hasMutation()) { 12878 12879 return false; 12880 } 12881 if (!getRegion().isInitialized()) { 12882 12883 return false; 12884 } 12885 if (!getMutation().isInitialized()) { 12886 12887 return false; 12888 } 12889 if (hasCondition()) { 12890 if (!getCondition().isInitialized()) { 12891 12892 return false; 12893 } 12894 } 12895 return true; 12896 } 12897 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12898 public Builder mergeFrom( 12899 com.google.protobuf.CodedInputStream input, 12900 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12901 throws java.io.IOException { 12902 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest parsedMessage = null; 12903 try { 12904 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 12905 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 12906 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest) e.getUnfinishedMessage(); 12907 throw e; 12908 } 
finally { 12909 if (parsedMessage != null) { 12910 mergeFrom(parsedMessage); 12911 } 12912 } 12913 return this; 12914 } 12915 private int bitField0_; 12916 12917 // required .RegionSpecifier region = 1; 12918 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 12919 private com.google.protobuf.SingleFieldBuilder< 12920 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; 12921 /** 12922 * <code>required .RegionSpecifier region = 1;</code> 12923 */ hasRegion()12924 public boolean hasRegion() { 12925 return ((bitField0_ & 0x00000001) == 0x00000001); 12926 } 12927 /** 12928 * <code>required .RegionSpecifier region = 1;</code> 12929 */ getRegion()12930 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 12931 if (regionBuilder_ == null) { 12932 return region_; 12933 } else { 12934 return regionBuilder_.getMessage(); 12935 } 12936 } 12937 /** 12938 * <code>required .RegionSpecifier region = 1;</code> 12939 */ setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)12940 public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 12941 if (regionBuilder_ == null) { 12942 if (value == null) { 12943 throw new NullPointerException(); 12944 } 12945 region_ = value; 12946 onChanged(); 12947 } else { 12948 regionBuilder_.setMessage(value); 12949 } 12950 bitField0_ |= 0x00000001; 12951 return this; 12952 } 12953 /** 12954 * <code>required .RegionSpecifier region = 1;</code> 12955 */ setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)12956 public Builder setRegion( 12957 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { 12958 if (regionBuilder_ == null) { 12959 region_ = builderForValue.build(); 12960 onChanged(); 12961 } else { 12962 regionBuilder_.setMessage(builderForValue.build()); 12963 } 12964 bitField0_ |= 0x00000001; 12965 return this; 12966 } 12967 /** 12968 * <code>required .RegionSpecifier region = 1;</code> 12969 */ mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)12970 public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 12971 if (regionBuilder_ == null) { 12972 if (((bitField0_ & 0x00000001) == 0x00000001) && 12973 region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { 12974 region_ = 12975 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); 12976 } else { 12977 region_ = value; 12978 } 12979 onChanged(); 12980 } else { 12981 regionBuilder_.mergeFrom(value); 12982 } 12983 bitField0_ |= 0x00000001; 12984 return this; 12985 } 12986 /** 12987 * <code>required .RegionSpecifier region = 1;</code> 12988 */ clearRegion()12989 public Builder clearRegion() { 12990 if (regionBuilder_ == null) { 12991 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 12992 onChanged(); 12993 } else { 12994 regionBuilder_.clear(); 12995 } 12996 bitField0_ = (bitField0_ & ~0x00000001); 12997 return this; 12998 } 12999 /** 13000 * <code>required .RegionSpecifier region = 1;</code> 13001 */ getRegionBuilder()13002 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { 13003 bitField0_ |= 0x00000001; 13004 onChanged(); 13005 return getRegionFieldBuilder().getBuilder(); 13006 } 13007 /** 13008 * <code>required .RegionSpecifier region = 1;</code> 13009 */ getRegionOrBuilder()13010 public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 13011 if (regionBuilder_ != null) { 13012 return regionBuilder_.getMessageOrBuilder(); 13013 } else { 13014 return region_; 13015 } 13016 } 13017 /** 13018 * <code>required .RegionSpecifier region = 1;</code> 13019 */ 13020 private com.google.protobuf.SingleFieldBuilder< 13021 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder()13022 getRegionFieldBuilder() { 13023 if (regionBuilder_ == null) { 13024 regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< 13025 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( 13026 region_, 13027 getParentForChildren(), 13028 isClean()); 13029 region_ = null; 13030 } 13031 return regionBuilder_; 13032 } 13033 13034 // required .MutationProto mutation = 2; 13035 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); 13036 private com.google.protobuf.SingleFieldBuilder< 13037 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationBuilder_; 13038 /** 13039 * <code>required .MutationProto mutation = 2;</code> 13040 */ hasMutation()13041 public boolean hasMutation() { 13042 return ((bitField0_ & 0x00000002) == 0x00000002); 13043 } 13044 /** 13045 * <code>required .MutationProto mutation = 2;</code> 13046 */ getMutation()13047 public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutation() { 13048 if (mutationBuilder_ == null) { 13049 return mutation_; 13050 } else { 13051 return mutationBuilder_.getMessage(); 13052 } 13053 } 13054 /** 13055 * <code>required .MutationProto mutation = 2;</code> 13056 */ setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value)13057 public Builder setMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { 13058 if (mutationBuilder_ == null) { 13059 if (value == null) { 13060 throw new NullPointerException(); 13061 } 13062 mutation_ = value; 13063 onChanged(); 13064 } else { 13065 mutationBuilder_.setMessage(value); 13066 } 13067 bitField0_ |= 0x00000002; 13068 return this; 13069 } 13070 /** 13071 * <code>required .MutationProto mutation = 2;</code> 13072 */ setMutation( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue)13073 public Builder setMutation( 13074 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) { 13075 if (mutationBuilder_ == null) { 13076 mutation_ = builderForValue.build(); 13077 onChanged(); 13078 } else { 13079 mutationBuilder_.setMessage(builderForValue.build()); 13080 } 13081 bitField0_ |= 0x00000002; 13082 return this; 13083 } 13084 /** 13085 * <code>required .MutationProto mutation = 2;</code> 13086 */ mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value)13087 public Builder mergeMutation(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) { 13088 if (mutationBuilder_ == null) { 13089 if (((bitField0_ & 0x00000002) == 0x00000002) && 13090 mutation_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance()) { 13091 mutation_ = 13092 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder(mutation_).mergeFrom(value).buildPartial(); 13093 } else { 
13094 mutation_ = value; 13095 } 13096 onChanged(); 13097 } else { 13098 mutationBuilder_.mergeFrom(value); 13099 } 13100 bitField0_ |= 0x00000002; 13101 return this; 13102 } 13103 /** 13104 * <code>required .MutationProto mutation = 2;</code> 13105 */ clearMutation()13106 public Builder clearMutation() { 13107 if (mutationBuilder_ == null) { 13108 mutation_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance(); 13109 onChanged(); 13110 } else { 13111 mutationBuilder_.clear(); 13112 } 13113 bitField0_ = (bitField0_ & ~0x00000002); 13114 return this; 13115 } 13116 /** 13117 * <code>required .MutationProto mutation = 2;</code> 13118 */ getMutationBuilder()13119 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationBuilder() { 13120 bitField0_ |= 0x00000002; 13121 onChanged(); 13122 return getMutationFieldBuilder().getBuilder(); 13123 } 13124 /** 13125 * <code>required .MutationProto mutation = 2;</code> 13126 */ getMutationOrBuilder()13127 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationOrBuilder() { 13128 if (mutationBuilder_ != null) { 13129 return mutationBuilder_.getMessageOrBuilder(); 13130 } else { 13131 return mutation_; 13132 } 13133 } 13134 /** 13135 * <code>required .MutationProto mutation = 2;</code> 13136 */ 13137 private com.google.protobuf.SingleFieldBuilder< 13138 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> getMutationFieldBuilder()13139 getMutationFieldBuilder() { 13140 if (mutationBuilder_ == null) { 13141 mutationBuilder_ = new com.google.protobuf.SingleFieldBuilder< 13142 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>( 13143 mutation_, 13144 getParentForChildren(), 13145 isClean()); 13146 mutation_ = null; 13147 } 13148 return mutationBuilder_; 13149 } 13150 13151 // optional .Condition condition = 3; 13152 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); 13153 private com.google.protobuf.SingleFieldBuilder< 13154 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> conditionBuilder_; 13155 /** 13156 * <code>optional .Condition condition = 3;</code> 13157 */ hasCondition()13158 public boolean hasCondition() { 13159 return ((bitField0_ & 0x00000004) == 0x00000004); 13160 } 13161 /** 13162 * <code>optional .Condition condition = 3;</code> 13163 */ getCondition()13164 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition getCondition() { 13165 if (conditionBuilder_ == null) { 13166 return condition_; 13167 } else { 13168 return conditionBuilder_.getMessage(); 13169 } 13170 } 13171 /** 13172 * <code>optional .Condition condition = 3;</code> 13173 */ setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value)13174 public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { 13175 if (conditionBuilder_ == null) { 13176 if (value == null) { 13177 throw new NullPointerException(); 13178 } 13179 condition_ = value; 13180 onChanged(); 13181 } else { 13182 conditionBuilder_.setMessage(value); 13183 } 13184 bitField0_ |= 0x00000004; 13185 return this; 13186 } 13187 /** 13188 * <code>optional .Condition condition = 3;</code> 13189 */ setCondition( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue)13190 
public Builder setCondition( 13191 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder builderForValue) { 13192 if (conditionBuilder_ == null) { 13193 condition_ = builderForValue.build(); 13194 onChanged(); 13195 } else { 13196 conditionBuilder_.setMessage(builderForValue.build()); 13197 } 13198 bitField0_ |= 0x00000004; 13199 return this; 13200 } 13201 /** 13202 * <code>optional .Condition condition = 3;</code> 13203 */ mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value)13204 public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition value) { 13205 if (conditionBuilder_ == null) { 13206 if (((bitField0_ & 0x00000004) == 0x00000004) && 13207 condition_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance()) { 13208 condition_ = 13209 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); 13210 } else { 13211 condition_ = value; 13212 } 13213 onChanged(); 13214 } else { 13215 conditionBuilder_.mergeFrom(value); 13216 } 13217 bitField0_ |= 0x00000004; 13218 return this; 13219 } 13220 /** 13221 * <code>optional .Condition condition = 3;</code> 13222 */ clearCondition()13223 public Builder clearCondition() { 13224 if (conditionBuilder_ == null) { 13225 condition_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.getDefaultInstance(); 13226 onChanged(); 13227 } else { 13228 conditionBuilder_.clear(); 13229 } 13230 bitField0_ = (bitField0_ & ~0x00000004); 13231 return this; 13232 } 13233 /** 13234 * <code>optional .Condition condition = 3;</code> 13235 */ getConditionBuilder()13236 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder getConditionBuilder() { 13237 bitField0_ |= 0x00000004; 13238 onChanged(); 13239 return getConditionFieldBuilder().getBuilder(); 13240 } 13241 /** 13242 * <code>optional .Condition condition = 
3;</code> 13243 */ getConditionOrBuilder()13244 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder getConditionOrBuilder() { 13245 if (conditionBuilder_ != null) { 13246 return conditionBuilder_.getMessageOrBuilder(); 13247 } else { 13248 return condition_; 13249 } 13250 } 13251 /** 13252 * <code>optional .Condition condition = 3;</code> 13253 */ 13254 private com.google.protobuf.SingleFieldBuilder< 13255 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder> getConditionFieldBuilder()13256 getConditionFieldBuilder() { 13257 if (conditionBuilder_ == null) { 13258 conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< 13259 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ConditionOrBuilder>( 13260 condition_, 13261 getParentForChildren(), 13262 isClean()); 13263 condition_ = null; 13264 } 13265 return conditionBuilder_; 13266 } 13267 13268 // optional uint64 nonce_group = 4; 13269 private long nonceGroup_ ; 13270 /** 13271 * <code>optional uint64 nonce_group = 4;</code> 13272 */ hasNonceGroup()13273 public boolean hasNonceGroup() { 13274 return ((bitField0_ & 0x00000008) == 0x00000008); 13275 } 13276 /** 13277 * <code>optional uint64 nonce_group = 4;</code> 13278 */ getNonceGroup()13279 public long getNonceGroup() { 13280 return nonceGroup_; 13281 } 13282 /** 13283 * <code>optional uint64 nonce_group = 4;</code> 13284 */ setNonceGroup(long value)13285 public Builder setNonceGroup(long value) { 13286 bitField0_ |= 0x00000008; 13287 nonceGroup_ = value; 13288 onChanged(); 13289 return this; 13290 } 13291 /** 13292 * <code>optional uint64 nonce_group = 4;</code> 13293 */ clearNonceGroup()13294 public Builder 
clearNonceGroup() { 13295 bitField0_ = (bitField0_ & ~0x00000008); 13296 nonceGroup_ = 0L; 13297 onChanged(); 13298 return this; 13299 } 13300 13301 // @@protoc_insertion_point(builder_scope:MutateRequest) 13302 } 13303 13304 static { 13305 defaultInstance = new MutateRequest(true); defaultInstance.initFields()13306 defaultInstance.initFields(); 13307 } 13308 13309 // @@protoc_insertion_point(class_scope:MutateRequest) 13310 } 13311 13312 public interface MutateResponseOrBuilder 13313 extends com.google.protobuf.MessageOrBuilder { 13314 13315 // optional .Result result = 1; 13316 /** 13317 * <code>optional .Result result = 1;</code> 13318 */ hasResult()13319 boolean hasResult(); 13320 /** 13321 * <code>optional .Result result = 1;</code> 13322 */ getResult()13323 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult(); 13324 /** 13325 * <code>optional .Result result = 1;</code> 13326 */ getResultOrBuilder()13327 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder(); 13328 13329 // optional bool processed = 2; 13330 /** 13331 * <code>optional bool processed = 2;</code> 13332 * 13333 * <pre> 13334 * used for mutate to indicate processed only 13335 * </pre> 13336 */ hasProcessed()13337 boolean hasProcessed(); 13338 /** 13339 * <code>optional bool processed = 2;</code> 13340 * 13341 * <pre> 13342 * used for mutate to indicate processed only 13343 * </pre> 13344 */ getProcessed()13345 boolean getProcessed(); 13346 } 13347 /** 13348 * Protobuf type {@code MutateResponse} 13349 */ 13350 public static final class MutateResponse extends 13351 com.google.protobuf.GeneratedMessage 13352 implements MutateResponseOrBuilder { 13353 // Use MutateResponse.newBuilder() to construct. 
MutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)13354 private MutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 13355 super(builder); 13356 this.unknownFields = builder.getUnknownFields(); 13357 } MutateResponse(boolean noInit)13358 private MutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 13359 13360 private static final MutateResponse defaultInstance; getDefaultInstance()13361 public static MutateResponse getDefaultInstance() { 13362 return defaultInstance; 13363 } 13364 getDefaultInstanceForType()13365 public MutateResponse getDefaultInstanceForType() { 13366 return defaultInstance; 13367 } 13368 13369 private final com.google.protobuf.UnknownFieldSet unknownFields; 13370 @java.lang.Override 13371 public final com.google.protobuf.UnknownFieldSet getUnknownFields()13372 getUnknownFields() { 13373 return this.unknownFields; 13374 } MutateResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)13375 private MutateResponse( 13376 com.google.protobuf.CodedInputStream input, 13377 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 13378 throws com.google.protobuf.InvalidProtocolBufferException { 13379 initFields(); 13380 int mutable_bitField0_ = 0; 13381 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 13382 com.google.protobuf.UnknownFieldSet.newBuilder(); 13383 try { 13384 boolean done = false; 13385 while (!done) { 13386 int tag = input.readTag(); 13387 switch (tag) { 13388 case 0: 13389 done = true; 13390 break; 13391 default: { 13392 if (!parseUnknownField(input, unknownFields, 13393 extensionRegistry, tag)) { 13394 done = true; 13395 } 13396 break; 13397 } 13398 case 10: { 13399 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder subBuilder = null; 13400 if (((bitField0_ & 0x00000001) == 0x00000001)) { 13401 subBuilder = result_.toBuilder(); 13402 } 13403 
result_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry); 13404 if (subBuilder != null) { 13405 subBuilder.mergeFrom(result_); 13406 result_ = subBuilder.buildPartial(); 13407 } 13408 bitField0_ |= 0x00000001; 13409 break; 13410 } 13411 case 16: { 13412 bitField0_ |= 0x00000002; 13413 processed_ = input.readBool(); 13414 break; 13415 } 13416 } 13417 } 13418 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 13419 throw e.setUnfinishedMessage(this); 13420 } catch (java.io.IOException e) { 13421 throw new com.google.protobuf.InvalidProtocolBufferException( 13422 e.getMessage()).setUnfinishedMessage(this); 13423 } finally { 13424 this.unknownFields = unknownFields.build(); 13425 makeExtensionsImmutable(); 13426 } 13427 } 13428 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()13429 getDescriptor() { 13430 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor; 13431 } 13432 13433 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()13434 internalGetFieldAccessorTable() { 13435 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable 13436 .ensureFieldAccessorsInitialized( 13437 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class); 13438 } 13439 13440 public static com.google.protobuf.Parser<MutateResponse> PARSER = 13441 new com.google.protobuf.AbstractParser<MutateResponse>() { 13442 public MutateResponse parsePartialFrom( 13443 com.google.protobuf.CodedInputStream input, 13444 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 13445 throws com.google.protobuf.InvalidProtocolBufferException { 13446 return new MutateResponse(input, extensionRegistry); 13447 } 13448 }; 13449 13450 @java.lang.Override 
getParserForType()13451 public com.google.protobuf.Parser<MutateResponse> getParserForType() { 13452 return PARSER; 13453 } 13454 13455 private int bitField0_; 13456 // optional .Result result = 1; 13457 public static final int RESULT_FIELD_NUMBER = 1; 13458 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_; 13459 /** 13460 * <code>optional .Result result = 1;</code> 13461 */ hasResult()13462 public boolean hasResult() { 13463 return ((bitField0_ & 0x00000001) == 0x00000001); 13464 } 13465 /** 13466 * <code>optional .Result result = 1;</code> 13467 */ getResult()13468 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() { 13469 return result_; 13470 } 13471 /** 13472 * <code>optional .Result result = 1;</code> 13473 */ getResultOrBuilder()13474 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() { 13475 return result_; 13476 } 13477 13478 // optional bool processed = 2; 13479 public static final int PROCESSED_FIELD_NUMBER = 2; 13480 private boolean processed_; 13481 /** 13482 * <code>optional bool processed = 2;</code> 13483 * 13484 * <pre> 13485 * used for mutate to indicate processed only 13486 * </pre> 13487 */ hasProcessed()13488 public boolean hasProcessed() { 13489 return ((bitField0_ & 0x00000002) == 0x00000002); 13490 } 13491 /** 13492 * <code>optional bool processed = 2;</code> 13493 * 13494 * <pre> 13495 * used for mutate to indicate processed only 13496 * </pre> 13497 */ getProcessed()13498 public boolean getProcessed() { 13499 return processed_; 13500 } 13501 initFields()13502 private void initFields() { 13503 result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance(); 13504 processed_ = false; 13505 } 13506 private byte memoizedIsInitialized = -1; isInitialized()13507 public final boolean isInitialized() { 13508 byte isInitialized = memoizedIsInitialized; 13509 if (isInitialized != -1) return isInitialized == 1; 
      // isInitialized() tail: no required fields, so always initialized.
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes the set fields (result=1, processed=2) in field-number order;
    // presence is tracked in bitField0_ (0x1 = result, 0x2 = processed).
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, result_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, processed_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, result_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(2, processed_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over the two fields plus unknown fields; presence must
    // match before values are compared.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) obj;

      boolean result = true;
      result = result && (hasResult() == other.hasResult());
      if (hasResult()) {
        result = result && getResult()
            .equals(other.getResult());
      }
      result = result && (hasProcessed() == other.hasProcessed());
      if (hasProcessed()) {
        result = result && (getProcessed()
            == other.getProcessed());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasResult()) {
        hash = (37 * hash) + RESULT_FIELD_NUMBER;
        hash = (53 * hash) + getResult().hashCode();
      }
      if (hasProcessed()) {
        hash = (37 * hash) + PROCESSED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getProcessed());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code MutateResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getResultFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (resultBuilder_ == null) {
          result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        processed_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_MutateResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message; has-bits are translated
      // from the builder's bitField0_ into the message's.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (resultBuilder_ == null) {
          result.result_ = result_;
        } else {
          result.result_ = resultBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.processed_ = processed_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse.getDefaultInstance()) return this;
        if (other.hasResult()) {
          mergeResult(other.getResult());
        }
        if (other.hasProcessed()) {
          setProcessed(other.getProcessed());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Parses from a stream; on failure the partially-parsed message (if any)
      // is still merged in before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // optional .Result result = 1;
      // result_ holds the value until a field builder is created; after that,
      // resultBuilder_ owns the value and result_ is nulled.
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> resultBuilder_;
      /**
       * <code>optional .Result result = 1;</code>
       */
      public boolean hasResult() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResult() {
        if (resultBuilder_ == null) {
          return result_;
        } else {
          return resultBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          result_ = value;
          onChanged();
        } else {
          resultBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder setResult(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder builderForValue) {
        if (resultBuilder_ == null) {
          result_ = builderForValue.build();
          onChanged();
        } else {
          resultBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result value) {
        if (resultBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              result_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance()) {
            result_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial();
          } else {
            result_ = value;
          }
          onChanged();
        } else {
          resultBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public Builder clearResult() {
        if (resultBuilder_ == null) {
          result_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.getDefaultInstance();
          onChanged();
        } else {
          resultBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder getResultBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResultFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultOrBuilder() {
        if (resultBuilder_ != null) {
          return resultBuilder_.getMessageOrBuilder();
        } else {
          return result_;
        }
      }
      /**
       * <code>optional .Result result = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>
          getResultFieldBuilder() {
        if (resultBuilder_ == null) {
          resultBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder>(
                  result_,
                  getParentForChildren(),
                  isClean());
          result_ = null;
        }
        return resultBuilder_;
      }

      // optional bool processed = 2;
      private boolean processed_
      ;
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       */
      public boolean hasProcessed() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       */
      public boolean getProcessed() {
        return processed_;
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       */
      public Builder setProcessed(boolean value) {
        bitField0_ |= 0x00000002;
        processed_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool processed = 2;</code>
       *
       * <pre>
       * used for mutate to indicate processed only
       * </pre>
       */
      public Builder clearProcessed() {
        bitField0_ = (bitField0_ & ~0x00000002);
        processed_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:MutateResponse)
    }

    static {
      defaultInstance = new MutateResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:MutateResponse)
  }

  public interface ScanOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .Column column = 1;
    /**
     * <code>repeated .Column column = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> 
        getColumnList();
    /**
     * <code>repeated .Column column = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index);
    /**
     * <code>repeated .Column column = 1;</code>
     */
    int getColumnCount();
    /**
     * <code>repeated .Column column = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> 
        getColumnOrBuilderList();
    /**
     * <code>repeated .Column column = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
        int index);

    // repeated .NameBytesPair attribute = 2;
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> 
        getAttributeList();
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index);
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    int getAttributeCount();
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> 
        getAttributeOrBuilderList();
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index);

    // optional bytes start_row = 3;
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    boolean hasStartRow();
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    com.google.protobuf.ByteString getStartRow();

    // optional bytes stop_row = 4;
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    boolean hasStopRow();
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    com.google.protobuf.ByteString getStopRow();

    // optional .Filter filter = 5;
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    boolean hasFilter();
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter();
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder();

    // optional .TimeRange time_range = 6;
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    boolean hasTimeRange();
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange();
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder();

    // optional uint32 max_versions = 7 [default = 1];
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    boolean hasMaxVersions();
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    int getMaxVersions();

    // optional bool cache_blocks = 8 [default = true];
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    boolean hasCacheBlocks();
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    boolean getCacheBlocks();

    // optional uint32 batch_size = 9;
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    boolean hasBatchSize();
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    int getBatchSize();

    // optional uint64 max_result_size = 10;
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    boolean hasMaxResultSize();
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    long getMaxResultSize();

    // optional uint32 store_limit = 11;
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    boolean hasStoreLimit();
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    int getStoreLimit();

    // optional uint32 store_offset = 12;
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    boolean hasStoreOffset();
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    int getStoreOffset();

    // optional bool load_column_families_on_demand = 13;
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand.
     * </pre>
     */
    boolean hasLoadColumnFamiliesOnDemand();
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand.
     * </pre>
     */
    boolean getLoadColumnFamiliesOnDemand();

    // optional bool small = 14;
    /**
     * <code>optional bool small = 14;</code>
     */
    boolean hasSmall();
    /**
     * <code>optional bool small = 14;</code>
     */
    boolean getSmall();

    // optional bool reversed = 15 [default = false];
    /**
     * <code>optional bool reversed = 15 [default = false];</code>
     */
    boolean hasReversed();
    /**
     * <code>optional bool reversed = 15 [default = false];</code>
     */
    boolean getReversed();

    // optional .Consistency consistency = 16 [default = STRONG];
    /**
     * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
     */
    boolean hasConsistency();
    /**
     * <code>optional .Consistency consistency = 16 [default = STRONG];</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency();

    // optional uint32 caching = 17;
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    boolean hasCaching();
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    int getCaching();

    // optional bool allow_partial_results = 18;
    /**
     * <code>optional bool allow_partial_results = 18;</code>
     */
    boolean hasAllowPartialResults();
    /**
     * <code>optional bool allow_partial_results = 18;</code>
     */
    boolean getAllowPartialResults();

    // repeated .ColumnFamilyTimeRange cf_time_range = 19;
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> 
        getCfTimeRangeList();
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index);
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    int getCfTimeRangeCount();
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> 
        getCfTimeRangeOrBuilderList();
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code Scan}
   *
   * <pre>
   **
   * Instead of get from a table, you can scan it with optional filters.
   * You can specify the row key range, time range, the columns/families
   * to scan and so on.
   *
   * This scan is used the first time in a scan request. The response of
   * the initial scan will return a scanner id, which should be used to
   * fetch result batches later on before it is closed.
   * </pre>
   */
  public static final class Scan extends
      com.google.protobuf.GeneratedMessage
      implements ScanOrBuilder {
    // Use Scan.newBuilder() to construct.
    private Scan(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance.
    private Scan(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Scan defaultInstance;
    public static Scan getDefaultInstance() {
      return defaultInstance;
    }

    public Scan getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: one case per field tag; unrecognized
    // tags are preserved in unknownFields. Repeated fields accumulate into
    // mutable lists that are made unmodifiable in the finally block.
    private Scan(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>();
                mutable_bitField0_ |= 0x00000001;
              }
              column_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.PARSER, extensionRegistry));
              break;
            }
            case 18: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>();
                mutable_bitField0_ |= 0x00000002;
              }
              attribute_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.PARSER, extensionRegistry));
              break;
            }
            case 26: {
              bitField0_ |= 0x00000001;
              startRow_ = input.readBytes();
              break;
            }
            case 34: {
              bitField0_ |= 0x00000002;
              stopRow_ = input.readBytes();
              break;
            }
            case 42: {
              // Repeated occurrences of a singular message field merge.
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = filter_.toBuilder();
              }
              filter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(filter_);
                filter_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 50: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = timeRange_.toBuilder();
              }
              timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(timeRange_);
                timeRange_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 56: {
              bitField0_ |= 0x00000010;
              maxVersions_ = input.readUInt32();
              break;
            }
            case 64: {
              bitField0_ |= 0x00000020;
              cacheBlocks_ = input.readBool();
              break;
            }
            case 72: {
              bitField0_ |= 0x00000040;
              batchSize_ = input.readUInt32();
              break;
            }
            case 80: {
              bitField0_ |= 0x00000080;
              maxResultSize_ = input.readUInt64();
              break;
            }
            case 88: {
              bitField0_ |= 0x00000100;
              storeLimit_ = input.readUInt32();
              break;
            }
            case 96: {
              bitField0_ |= 0x00000200;
              storeOffset_ = input.readUInt32();
              break;
            }
            case 104: {
              bitField0_ |= 0x00000400;
              loadColumnFamiliesOnDemand_ = input.readBool();
              break;
            }
            case 112: {
              bitField0_ |= 0x00000800;
              small_ = input.readBool();
              break;
            }
            case 120: {
              bitField0_ |= 0x00001000;
              reversed_ = input.readBool();
              break;
            }
            case 128: {
              // Unknown enum numbers are kept as varints in unknownFields.
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(16, rawValue);
              } else {
                bitField0_ |= 0x00002000;
                consistency_ = value;
              }
              break;
            }
            case 136: {
              bitField0_ |= 0x00004000;
              caching_ = input.readUInt32();
              break;
            }
            case 144: {
              bitField0_ |= 0x00008000;
              allowPartialResults_ = input.readBool();
              break;
            }
            case 154: {
              if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
                cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>();
                mutable_bitField0_ |= 0x00040000;
              }
              cfTimeRange_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          column_ = java.util.Collections.unmodifiableList(column_);
        }
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          attribute_ = java.util.Collections.unmodifiableList(attribute_);
        }
        if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
          cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class);
    }

    public static com.google.protobuf.Parser<Scan> PARSER =
        new com.google.protobuf.AbstractParser<Scan>() {
      public Scan parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Scan(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Scan> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // ---------------------------------------------------------------------
    // Generated field storage and accessors for Scan fields 1-7.
    // Repeated fields have no has-bit: presence is list non-emptiness.
    // Optional fields record presence in bitField0_ (one bit per field,
    // assigned in declaration order: start_row=0x01, stop_row=0x02, ...).
    // ---------------------------------------------------------------------

    // repeated .Column column = 1;
    public static final int COLUMN_FIELD_NUMBER = 1;
    // Immutable after parsing (wrapped in Collections.unmodifiableList).
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_;
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() {
      return column_;
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>
        getColumnOrBuilderList() {
      return column_;
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public int getColumnCount() {
      return column_.size();
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) {
      return column_.get(index);
    }
    /**
     * <code>repeated .Column column = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder(
        int index) {
      return column_.get(index);
    }

    // repeated .NameBytesPair attribute = 2;
    public static final int ATTRIBUTE_FIELD_NUMBER = 2;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_;
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
        getAttributeOrBuilderList() {
      return attribute_;
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public int getAttributeCount() {
      return attribute_.size();
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) {
      return attribute_.get(index);
    }
    /**
     * <code>repeated .NameBytesPair attribute = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder(
        int index) {
      return attribute_.get(index);
    }

    // optional bytes start_row = 3;
    public static final int START_ROW_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString startRow_;
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    public boolean hasStartRow() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes start_row = 3;</code>
     */
    public com.google.protobuf.ByteString getStartRow() {
      return startRow_;
    }

    // optional bytes stop_row = 4;
    public static final int STOP_ROW_FIELD_NUMBER = 4;
    private com.google.protobuf.ByteString stopRow_;
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    public boolean hasStopRow() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes stop_row = 4;</code>
     */
    public com.google.protobuf.ByteString getStopRow() {
      return stopRow_;
    }

    // optional .Filter filter = 5;
    public static final int FILTER_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_;
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    public boolean hasFilter() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() {
      return filter_;
    }
    /**
     * <code>optional .Filter filter = 5;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() {
      // Message fields are never null: initFields() installs the default
      // instance, so the getter doubles as the OrBuilder view.
      return filter_;
    }

    // optional .TimeRange time_range = 6;
    public static final int TIME_RANGE_FIELD_NUMBER = 6;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_;
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    public boolean hasTimeRange() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() {
      return timeRange_;
    }
    /**
     * <code>optional .TimeRange time_range = 6;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() {
      return timeRange_;
    }

    // optional uint32 max_versions = 7 [default = 1];
    public static final int MAX_VERSIONS_FIELD_NUMBER = 7;
    private int maxVersions_;
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    public boolean hasMaxVersions() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional uint32 max_versions = 7 [default = 1];</code>
     */
    public int getMaxVersions() {
      // Default of 1 is installed by initFields() when the field is unset.
      return maxVersions_;
    }

    // optional bool cache_blocks = 8 [default = true];
    public static final int CACHE_BLOCKS_FIELD_NUMBER = 8;
    private boolean cacheBlocks_;
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    public boolean hasCacheBlocks() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional bool cache_blocks = 8 [default = true];</code>
     */
    public boolean getCacheBlocks() {
      return cacheBlocks_;
    }

    // optional uint32 batch_size = 9;
    public static final int BATCH_SIZE_FIELD_NUMBER = 9;
    private int batchSize_;
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    public boolean hasBatchSize() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional uint32 batch_size = 9;</code>
     */
    public int getBatchSize() {
      return batchSize_;
    }

    // optional uint64 max_result_size = 10;
    public static final int MAX_RESULT_SIZE_FIELD_NUMBER = 10;
    private long maxResultSize_;
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    public boolean hasMaxResultSize() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional uint64 max_result_size = 10;</code>
     */
    public long getMaxResultSize() {
      return maxResultSize_;
    }

    // optional uint32 store_limit = 11;
    public static final int STORE_LIMIT_FIELD_NUMBER = 11;
    private int storeLimit_;
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    public boolean hasStoreLimit() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional uint32 store_limit = 11;</code>
     */
    public int getStoreLimit() {
      return storeLimit_;
    }

    // optional uint32 store_offset = 12;
    public static final int STORE_OFFSET_FIELD_NUMBER = 12;
    private int storeOffset_;
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    public boolean hasStoreOffset() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * <code>optional uint32 store_offset = 12;</code>
     */
    public int getStoreOffset() {
      return storeOffset_;
    }

    // optional bool load_column_families_on_demand = 13;
    public static final int LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER = 13;
    private boolean loadColumnFamiliesOnDemand_;
    /**
     * <code>optional bool load_column_families_on_demand = 13;</code>
     *
     * <pre>
     * DO NOT add defaults to load_column_families_on_demand.
     * </pre>
     */
    public boolean hasLoadColumnFamiliesOnDemand() {
      return ((bitField0_ & 0x00000400) == 0x00000400);
    }
14732 * </pre> 14733 */ getLoadColumnFamiliesOnDemand()14734 public boolean getLoadColumnFamiliesOnDemand() { 14735 return loadColumnFamiliesOnDemand_; 14736 } 14737 14738 // optional bool small = 14; 14739 public static final int SMALL_FIELD_NUMBER = 14; 14740 private boolean small_; 14741 /** 14742 * <code>optional bool small = 14;</code> 14743 */ hasSmall()14744 public boolean hasSmall() { 14745 return ((bitField0_ & 0x00000800) == 0x00000800); 14746 } 14747 /** 14748 * <code>optional bool small = 14;</code> 14749 */ getSmall()14750 public boolean getSmall() { 14751 return small_; 14752 } 14753 14754 // optional bool reversed = 15 [default = false]; 14755 public static final int REVERSED_FIELD_NUMBER = 15; 14756 private boolean reversed_; 14757 /** 14758 * <code>optional bool reversed = 15 [default = false];</code> 14759 */ hasReversed()14760 public boolean hasReversed() { 14761 return ((bitField0_ & 0x00001000) == 0x00001000); 14762 } 14763 /** 14764 * <code>optional bool reversed = 15 [default = false];</code> 14765 */ getReversed()14766 public boolean getReversed() { 14767 return reversed_; 14768 } 14769 14770 // optional .Consistency consistency = 16 [default = STRONG]; 14771 public static final int CONSISTENCY_FIELD_NUMBER = 16; 14772 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_; 14773 /** 14774 * <code>optional .Consistency consistency = 16 [default = STRONG];</code> 14775 */ hasConsistency()14776 public boolean hasConsistency() { 14777 return ((bitField0_ & 0x00002000) == 0x00002000); 14778 } 14779 /** 14780 * <code>optional .Consistency consistency = 16 [default = STRONG];</code> 14781 */ getConsistency()14782 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() { 14783 return consistency_; 14784 } 14785 14786 // optional uint32 caching = 17; 14787 public static final int CACHING_FIELD_NUMBER = 17; 14788 private int caching_; 14789 /** 14790 * <code>optional uint32 caching 
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    public boolean hasCaching() {
      return ((bitField0_ & 0x00004000) == 0x00004000);
    }
    /**
     * <code>optional uint32 caching = 17;</code>
     */
    public int getCaching() {
      return caching_;
    }

    // optional bool allow_partial_results = 18;
    public static final int ALLOW_PARTIAL_RESULTS_FIELD_NUMBER = 18;
    private boolean allowPartialResults_;
    /**
     * <code>optional bool allow_partial_results = 18;</code>
     */
    public boolean hasAllowPartialResults() {
      return ((bitField0_ & 0x00008000) == 0x00008000);
    }
    /**
     * <code>optional bool allow_partial_results = 18;</code>
     */
    public boolean getAllowPartialResults() {
      return allowPartialResults_;
    }

    // repeated .ColumnFamilyTimeRange cf_time_range = 19;
    public static final int CF_TIME_RANGE_FIELD_NUMBER = 19;
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_;
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() {
      return cfTimeRange_;
    }
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>
        getCfTimeRangeOrBuilderList() {
      return cfTimeRange_;
    }
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    public int getCfTimeRangeCount() {
      return cfTimeRange_.size();
    }
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) {
      return cfTimeRange_.get(index);
    }
    /**
     * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder(
        int index) {
      return cfTimeRange_.get(index);
    }

    // Installs every field's proto default so getters never return null.
    // Called before parsing; matches the [default = ...] options above.
    private void initFields() {
      column_ = java.util.Collections.emptyList();
      attribute_ = java.util.Collections.emptyList();
      startRow_ = com.google.protobuf.ByteString.EMPTY;
      stopRow_ = com.google.protobuf.ByteString.EMPTY;
      filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance();
      timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance();
      maxVersions_ = 1;
      cacheBlocks_ = true;
      batchSize_ = 0;
      maxResultSize_ = 0L;
      storeLimit_ = 0;
      storeOffset_ = 0;
      loadColumnFamiliesOnDemand_ = false;
      small_ = false;
      reversed_ = false;
      consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG;
      caching_ = 0;
      allowPartialResults_ = false;
      cfTimeRange_ = java.util.Collections.emptyList();
    }
    // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    // A Scan is initialized when every contained message (columns,
    // attributes, filter, cf_time_range entries) is itself initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      for (int i = 0; i < getColumnCount(); i++) {
        if (!getColumn(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAttributeCount(); i++) {
        if (!getAttribute(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasFilter()) {
        if (!getFilter().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getCfTimeRangeCount(); i++) {
        if (!getCfTimeRange(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes set fields in ascending field-number order (the order is
    // part of the wire contract and mirrors getSerializedSize()).
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensure size memoization before writing
      for (int i = 0; i < column_.size(); i++) {
        output.writeMessage(1, column_.get(i));
      }
      for (int i = 0; i < attribute_.size(); i++) {
        output.writeMessage(2, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(3, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(4, stopRow_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(5, filter_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(6, timeRange_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeUInt32(7, maxVersions_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBool(8, cacheBlocks_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeUInt32(9, batchSize_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeUInt64(10, maxResultSize_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeUInt32(11, storeLimit_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeUInt32(12, storeOffset_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        output.writeBool(13, loadColumnFamiliesOnDemand_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        output.writeBool(14, small_);
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        output.writeBool(15, reversed_);
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        output.writeEnum(16, consistency_.getNumber());
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        output.writeUInt32(17, caching_);
      }
      if (((bitField0_ & 0x00008000) == 0x00008000)) {
        output.writeBool(18, allowPartialResults_);
      }
      for (int i = 0; i < cfTimeRange_.size(); i++) {
        output.writeMessage(19, cfTimeRange_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means not yet computed (safe: instances are
    // immutable once built).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < column_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, column_.get(i));
      }
      for (int i = 0; i < attribute_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, attribute_.get(i));
      }
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, startRow_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, stopRow_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(5, filter_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(6, timeRange_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(7, maxVersions_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(8, cacheBlocks_);
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(9, batchSize_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(10, maxResultSize_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(11, storeLimit_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(12, storeOffset_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(13, loadColumnFamiliesOnDemand_);
      }
      if (((bitField0_ & 0x00000800) == 0x00000800)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(14, small_);
      }
      if (((bitField0_ & 0x00001000) == 0x00001000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(15, reversed_);
      }
      if (((bitField0_ & 0x00002000) == 0x00002000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(16, consistency_.getNumber());
      }
      if (((bitField0_ & 0x00004000) == 0x00004000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(17, caching_);
      }
      if (((bitField0_ & 0x00008000) == 0x00008000)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(18, allowPartialResults_);
      }
      for (int i = 0; i < cfTimeRange_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(19, cfTimeRange_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Field-by-field equality: for each optional field, has-bits must agree
    // and, when set, the values must match; unknown fields also compared.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) obj;

      boolean result = true;
      result = result && getColumnList()
          .equals(other.getColumnList());
      result = result && getAttributeList()
          .equals(other.getAttributeList());
      result = result && (hasStartRow() == other.hasStartRow());
      if (hasStartRow()) {
        result = result && getStartRow()
            .equals(other.getStartRow());
      }
      result = result && (hasStopRow() == other.hasStopRow());
      if (hasStopRow()) {
        result = result && getStopRow()
            .equals(other.getStopRow());
      }
      result = result && (hasFilter() == other.hasFilter());
      if (hasFilter()) {
        result = result && getFilter()
            .equals(other.getFilter());
      }
      result = result && (hasTimeRange() == other.hasTimeRange());
      if (hasTimeRange()) {
        result = result && getTimeRange()
            .equals(other.getTimeRange());
      }
      result = result && (hasMaxVersions() == other.hasMaxVersions());
      if (hasMaxVersions()) {
        result = result && (getMaxVersions()
            == other.getMaxVersions());
      }
      result = result && (hasCacheBlocks() == other.hasCacheBlocks());
      if (hasCacheBlocks()) {
        result = result && (getCacheBlocks()
            == other.getCacheBlocks());
      }
      result = result && (hasBatchSize() == other.hasBatchSize());
      if (hasBatchSize()) {
        result = result && (getBatchSize()
            == other.getBatchSize());
      }
      result = result && (hasMaxResultSize() == other.hasMaxResultSize());
      if (hasMaxResultSize()) {
        result = result && (getMaxResultSize()
            == other.getMaxResultSize());
      }
      result = result && (hasStoreLimit() == other.hasStoreLimit());
      if (hasStoreLimit()) {
        result = result && (getStoreLimit()
            == other.getStoreLimit());
      }
      result = result && (hasStoreOffset() == other.hasStoreOffset());
      if (hasStoreOffset()) {
        result = result && (getStoreOffset()
            == other.getStoreOffset());
      }
      result = result && (hasLoadColumnFamiliesOnDemand() == other.hasLoadColumnFamiliesOnDemand());
      if (hasLoadColumnFamiliesOnDemand()) {
        result = result && (getLoadColumnFamiliesOnDemand()
            == other.getLoadColumnFamiliesOnDemand());
      }
      result = result && (hasSmall() == other.hasSmall());
      if (hasSmall()) {
        result = result && (getSmall()
            == other.getSmall());
      }
      result = result && (hasReversed() == other.hasReversed());
      if (hasReversed()) {
        result = result && (getReversed()
            == other.getReversed());
      }
      result = result && (hasConsistency() == other.hasConsistency());
      if (hasConsistency()) {
        result = result &&
            (getConsistency() == other.getConsistency());
      }
      result = result && (hasCaching() == other.hasCaching());
      if (hasCaching()) {
        result = result && (getCaching()
            == other.getCaching());
      }
      result = result && (hasAllowPartialResults() == other.hasAllowPartialResults());
      if (hasAllowPartialResults()) {
        result = result && (getAllowPartialResults()
            == other.getAllowPartialResults());
      }
      result = result && getCfTimeRangeList()
          .equals(other.getCfTimeRangeList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 means not yet computed. Only set fields contribute,
    // keeping hashCode() consistent with equals().
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getColumnCount() > 0) {
        hash = (37 * hash) + COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getColumnList().hashCode();
      }
      if (getAttributeCount() > 0) {
        hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeList().hashCode();
      }
      if (hasStartRow()) {
        hash = (37 * hash) + START_ROW_FIELD_NUMBER;
        hash = (53 * hash) + getStartRow().hashCode();
      }
      if (hasStopRow()) {
        hash = (37 * hash) + STOP_ROW_FIELD_NUMBER;
        hash = (53 * hash) + getStopRow().hashCode();
      }
      if (hasFilter()) {
        hash = (37 * hash) + FILTER_FIELD_NUMBER;
        hash = (53 * hash) + getFilter().hashCode();
      }
      if (hasTimeRange()) {
        hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER;
        hash = (53 * hash) + getTimeRange().hashCode();
      }
      if (hasMaxVersions()) {
        hash = (37 * hash) + MAX_VERSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getMaxVersions();
      }
      if (hasCacheBlocks()) {
        hash = (37 * hash) + CACHE_BLOCKS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCacheBlocks());
      }
      if (hasBatchSize()) {
        hash = (37 * hash) + BATCH_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + getBatchSize();
      }
      if (hasMaxResultSize()) {
        hash = (37 * hash) + MAX_RESULT_SIZE_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getMaxResultSize());
      }
      if (hasStoreLimit()) {
        hash = (37 * hash) + STORE_LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + getStoreLimit();
      }
      if (hasStoreOffset()) {
        hash = (37 * hash) + STORE_OFFSET_FIELD_NUMBER;
        hash = (53 * hash) + getStoreOffset();
      }
      if (hasLoadColumnFamiliesOnDemand()) {
        hash = (37 * hash) + LOAD_COLUMN_FAMILIES_ON_DEMAND_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getLoadColumnFamiliesOnDemand());
      }
      if (hasSmall()) {
        hash = (37 * hash) + SMALL_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getSmall());
      }
      if (hasReversed()) {
        hash = (37 * hash) + REVERSED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getReversed());
      }
      if (hasConsistency()) {
        hash = (37 * hash) + CONSISTENCY_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getConsistency());
      }
      if (hasCaching()) {
        hash = (37 * hash) + CACHING_FIELD_NUMBER;
        hash = (53 * hash) + getCaching();
      }
      if (hasAllowPartialResults()) {
        hash = (37 * hash) + ALLOW_PARTIAL_RESULTS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getAllowPartialResults());
      }
      if (getCfTimeRangeCount() > 0) {
        hash = (37 * hash) + CF_TIME_RANGE_FIELD_NUMBER;
        hash = (53 * hash) + getCfTimeRangeList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    /** Parses a Scan from serialized bytes; delegates to {@link #PARSER}. */
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    // Static parse entry points; all delegate to PARSER, which in turn
    // invokes the parsing constructor.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants first read a varint length prefix, so multiple
    // messages can share one stream.
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    /** Returns a fresh Builder with all fields at their defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a Builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a Builder initialized with this message's fields. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent link lets nested builders propagate change notifications.
      Builder builder = new Builder(parent);
      return builder;
    }
with optional filters. 15328 * You can specify the row key range, time range, the columns/families 15329 * to scan and so on. 15330 * 15331 * This scan is used the first time in a scan request. The response of 15332 * the initial scan will return a scanner id, which should be used to 15333 * fetch result batches later on before it is closed. 15334 * </pre> 15335 */ 15336 public static final class Builder extends 15337 com.google.protobuf.GeneratedMessage.Builder<Builder> 15338 implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder { 15339 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()15340 getDescriptor() { 15341 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; 15342 } 15343 15344 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()15345 internalGetFieldAccessorTable() { 15346 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_fieldAccessorTable 15347 .ensureFieldAccessorsInitialized( 15348 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder.class); 15349 } 15350 15351 // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder() Builder()15352 private Builder() { 15353 maybeForceBuilderInitialization(); 15354 } 15355 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)15356 private Builder( 15357 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 15358 super(parent); 15359 maybeForceBuilderInitialization(); 15360 } maybeForceBuilderInitialization()15361 private void maybeForceBuilderInitialization() { 15362 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 15363 getColumnFieldBuilder(); 15364 getAttributeFieldBuilder(); 15365 getFilterFieldBuilder(); 15366 getTimeRangeFieldBuilder(); 15367 getCfTimeRangeFieldBuilder(); 15368 } 
15369 } create()15370 private static Builder create() { 15371 return new Builder(); 15372 } 15373 clear()15374 public Builder clear() { 15375 super.clear(); 15376 if (columnBuilder_ == null) { 15377 column_ = java.util.Collections.emptyList(); 15378 bitField0_ = (bitField0_ & ~0x00000001); 15379 } else { 15380 columnBuilder_.clear(); 15381 } 15382 if (attributeBuilder_ == null) { 15383 attribute_ = java.util.Collections.emptyList(); 15384 bitField0_ = (bitField0_ & ~0x00000002); 15385 } else { 15386 attributeBuilder_.clear(); 15387 } 15388 startRow_ = com.google.protobuf.ByteString.EMPTY; 15389 bitField0_ = (bitField0_ & ~0x00000004); 15390 stopRow_ = com.google.protobuf.ByteString.EMPTY; 15391 bitField0_ = (bitField0_ & ~0x00000008); 15392 if (filterBuilder_ == null) { 15393 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 15394 } else { 15395 filterBuilder_.clear(); 15396 } 15397 bitField0_ = (bitField0_ & ~0x00000010); 15398 if (timeRangeBuilder_ == null) { 15399 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 15400 } else { 15401 timeRangeBuilder_.clear(); 15402 } 15403 bitField0_ = (bitField0_ & ~0x00000020); 15404 maxVersions_ = 1; 15405 bitField0_ = (bitField0_ & ~0x00000040); 15406 cacheBlocks_ = true; 15407 bitField0_ = (bitField0_ & ~0x00000080); 15408 batchSize_ = 0; 15409 bitField0_ = (bitField0_ & ~0x00000100); 15410 maxResultSize_ = 0L; 15411 bitField0_ = (bitField0_ & ~0x00000200); 15412 storeLimit_ = 0; 15413 bitField0_ = (bitField0_ & ~0x00000400); 15414 storeOffset_ = 0; 15415 bitField0_ = (bitField0_ & ~0x00000800); 15416 loadColumnFamiliesOnDemand_ = false; 15417 bitField0_ = (bitField0_ & ~0x00001000); 15418 small_ = false; 15419 bitField0_ = (bitField0_ & ~0x00002000); 15420 reversed_ = false; 15421 bitField0_ = (bitField0_ & ~0x00004000); 15422 consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 15423 
bitField0_ = (bitField0_ & ~0x00008000); 15424 caching_ = 0; 15425 bitField0_ = (bitField0_ & ~0x00010000); 15426 allowPartialResults_ = false; 15427 bitField0_ = (bitField0_ & ~0x00020000); 15428 if (cfTimeRangeBuilder_ == null) { 15429 cfTimeRange_ = java.util.Collections.emptyList(); 15430 bitField0_ = (bitField0_ & ~0x00040000); 15431 } else { 15432 cfTimeRangeBuilder_.clear(); 15433 } 15434 return this; 15435 } 15436 clone()15437 public Builder clone() { 15438 return create().mergeFrom(buildPartial()); 15439 } 15440 15441 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()15442 getDescriptorForType() { 15443 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Scan_descriptor; 15444 } 15445 getDefaultInstanceForType()15446 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getDefaultInstanceForType() { 15447 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); 15448 } 15449 build()15450 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan build() { 15451 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = buildPartial(); 15452 if (!result.isInitialized()) { 15453 throw newUninitializedMessageException(result); 15454 } 15455 return result; 15456 } 15457 buildPartial()15458 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan buildPartial() { 15459 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan(this); 15460 int from_bitField0_ = bitField0_; 15461 int to_bitField0_ = 0; 15462 if (columnBuilder_ == null) { 15463 if (((bitField0_ & 0x00000001) == 0x00000001)) { 15464 column_ = java.util.Collections.unmodifiableList(column_); 15465 bitField0_ = (bitField0_ & ~0x00000001); 15466 } 15467 result.column_ = column_; 15468 } else { 15469 result.column_ = columnBuilder_.build(); 15470 } 15471 if (attributeBuilder_ == null) { 15472 if 
(((bitField0_ & 0x00000002) == 0x00000002)) { 15473 attribute_ = java.util.Collections.unmodifiableList(attribute_); 15474 bitField0_ = (bitField0_ & ~0x00000002); 15475 } 15476 result.attribute_ = attribute_; 15477 } else { 15478 result.attribute_ = attributeBuilder_.build(); 15479 } 15480 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 15481 to_bitField0_ |= 0x00000001; 15482 } 15483 result.startRow_ = startRow_; 15484 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 15485 to_bitField0_ |= 0x00000002; 15486 } 15487 result.stopRow_ = stopRow_; 15488 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 15489 to_bitField0_ |= 0x00000004; 15490 } 15491 if (filterBuilder_ == null) { 15492 result.filter_ = filter_; 15493 } else { 15494 result.filter_ = filterBuilder_.build(); 15495 } 15496 if (((from_bitField0_ & 0x00000020) == 0x00000020)) { 15497 to_bitField0_ |= 0x00000008; 15498 } 15499 if (timeRangeBuilder_ == null) { 15500 result.timeRange_ = timeRange_; 15501 } else { 15502 result.timeRange_ = timeRangeBuilder_.build(); 15503 } 15504 if (((from_bitField0_ & 0x00000040) == 0x00000040)) { 15505 to_bitField0_ |= 0x00000010; 15506 } 15507 result.maxVersions_ = maxVersions_; 15508 if (((from_bitField0_ & 0x00000080) == 0x00000080)) { 15509 to_bitField0_ |= 0x00000020; 15510 } 15511 result.cacheBlocks_ = cacheBlocks_; 15512 if (((from_bitField0_ & 0x00000100) == 0x00000100)) { 15513 to_bitField0_ |= 0x00000040; 15514 } 15515 result.batchSize_ = batchSize_; 15516 if (((from_bitField0_ & 0x00000200) == 0x00000200)) { 15517 to_bitField0_ |= 0x00000080; 15518 } 15519 result.maxResultSize_ = maxResultSize_; 15520 if (((from_bitField0_ & 0x00000400) == 0x00000400)) { 15521 to_bitField0_ |= 0x00000100; 15522 } 15523 result.storeLimit_ = storeLimit_; 15524 if (((from_bitField0_ & 0x00000800) == 0x00000800)) { 15525 to_bitField0_ |= 0x00000200; 15526 } 15527 result.storeOffset_ = storeOffset_; 15528 if (((from_bitField0_ & 0x00001000) == 0x00001000)) { 15529 
to_bitField0_ |= 0x00000400; 15530 } 15531 result.loadColumnFamiliesOnDemand_ = loadColumnFamiliesOnDemand_; 15532 if (((from_bitField0_ & 0x00002000) == 0x00002000)) { 15533 to_bitField0_ |= 0x00000800; 15534 } 15535 result.small_ = small_; 15536 if (((from_bitField0_ & 0x00004000) == 0x00004000)) { 15537 to_bitField0_ |= 0x00001000; 15538 } 15539 result.reversed_ = reversed_; 15540 if (((from_bitField0_ & 0x00008000) == 0x00008000)) { 15541 to_bitField0_ |= 0x00002000; 15542 } 15543 result.consistency_ = consistency_; 15544 if (((from_bitField0_ & 0x00010000) == 0x00010000)) { 15545 to_bitField0_ |= 0x00004000; 15546 } 15547 result.caching_ = caching_; 15548 if (((from_bitField0_ & 0x00020000) == 0x00020000)) { 15549 to_bitField0_ |= 0x00008000; 15550 } 15551 result.allowPartialResults_ = allowPartialResults_; 15552 if (cfTimeRangeBuilder_ == null) { 15553 if (((bitField0_ & 0x00040000) == 0x00040000)) { 15554 cfTimeRange_ = java.util.Collections.unmodifiableList(cfTimeRange_); 15555 bitField0_ = (bitField0_ & ~0x00040000); 15556 } 15557 result.cfTimeRange_ = cfTimeRange_; 15558 } else { 15559 result.cfTimeRange_ = cfTimeRangeBuilder_.build(); 15560 } 15561 result.bitField0_ = to_bitField0_; 15562 onBuilt(); 15563 return result; 15564 } 15565 mergeFrom(com.google.protobuf.Message other)15566 public Builder mergeFrom(com.google.protobuf.Message other) { 15567 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) { 15568 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan)other); 15569 } else { 15570 super.mergeFrom(other); 15571 return this; 15572 } 15573 } 15574 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other)15575 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan other) { 15576 if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) return this; 15577 if (columnBuilder_ == null) { 15578 if 
(!other.column_.isEmpty()) { 15579 if (column_.isEmpty()) { 15580 column_ = other.column_; 15581 bitField0_ = (bitField0_ & ~0x00000001); 15582 } else { 15583 ensureColumnIsMutable(); 15584 column_.addAll(other.column_); 15585 } 15586 onChanged(); 15587 } 15588 } else { 15589 if (!other.column_.isEmpty()) { 15590 if (columnBuilder_.isEmpty()) { 15591 columnBuilder_.dispose(); 15592 columnBuilder_ = null; 15593 column_ = other.column_; 15594 bitField0_ = (bitField0_ & ~0x00000001); 15595 columnBuilder_ = 15596 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 15597 getColumnFieldBuilder() : null; 15598 } else { 15599 columnBuilder_.addAllMessages(other.column_); 15600 } 15601 } 15602 } 15603 if (attributeBuilder_ == null) { 15604 if (!other.attribute_.isEmpty()) { 15605 if (attribute_.isEmpty()) { 15606 attribute_ = other.attribute_; 15607 bitField0_ = (bitField0_ & ~0x00000002); 15608 } else { 15609 ensureAttributeIsMutable(); 15610 attribute_.addAll(other.attribute_); 15611 } 15612 onChanged(); 15613 } 15614 } else { 15615 if (!other.attribute_.isEmpty()) { 15616 if (attributeBuilder_.isEmpty()) { 15617 attributeBuilder_.dispose(); 15618 attributeBuilder_ = null; 15619 attribute_ = other.attribute_; 15620 bitField0_ = (bitField0_ & ~0x00000002); 15621 attributeBuilder_ = 15622 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
15623 getAttributeFieldBuilder() : null; 15624 } else { 15625 attributeBuilder_.addAllMessages(other.attribute_); 15626 } 15627 } 15628 } 15629 if (other.hasStartRow()) { 15630 setStartRow(other.getStartRow()); 15631 } 15632 if (other.hasStopRow()) { 15633 setStopRow(other.getStopRow()); 15634 } 15635 if (other.hasFilter()) { 15636 mergeFilter(other.getFilter()); 15637 } 15638 if (other.hasTimeRange()) { 15639 mergeTimeRange(other.getTimeRange()); 15640 } 15641 if (other.hasMaxVersions()) { 15642 setMaxVersions(other.getMaxVersions()); 15643 } 15644 if (other.hasCacheBlocks()) { 15645 setCacheBlocks(other.getCacheBlocks()); 15646 } 15647 if (other.hasBatchSize()) { 15648 setBatchSize(other.getBatchSize()); 15649 } 15650 if (other.hasMaxResultSize()) { 15651 setMaxResultSize(other.getMaxResultSize()); 15652 } 15653 if (other.hasStoreLimit()) { 15654 setStoreLimit(other.getStoreLimit()); 15655 } 15656 if (other.hasStoreOffset()) { 15657 setStoreOffset(other.getStoreOffset()); 15658 } 15659 if (other.hasLoadColumnFamiliesOnDemand()) { 15660 setLoadColumnFamiliesOnDemand(other.getLoadColumnFamiliesOnDemand()); 15661 } 15662 if (other.hasSmall()) { 15663 setSmall(other.getSmall()); 15664 } 15665 if (other.hasReversed()) { 15666 setReversed(other.getReversed()); 15667 } 15668 if (other.hasConsistency()) { 15669 setConsistency(other.getConsistency()); 15670 } 15671 if (other.hasCaching()) { 15672 setCaching(other.getCaching()); 15673 } 15674 if (other.hasAllowPartialResults()) { 15675 setAllowPartialResults(other.getAllowPartialResults()); 15676 } 15677 if (cfTimeRangeBuilder_ == null) { 15678 if (!other.cfTimeRange_.isEmpty()) { 15679 if (cfTimeRange_.isEmpty()) { 15680 cfTimeRange_ = other.cfTimeRange_; 15681 bitField0_ = (bitField0_ & ~0x00040000); 15682 } else { 15683 ensureCfTimeRangeIsMutable(); 15684 cfTimeRange_.addAll(other.cfTimeRange_); 15685 } 15686 onChanged(); 15687 } 15688 } else { 15689 if (!other.cfTimeRange_.isEmpty()) { 15690 if 
(cfTimeRangeBuilder_.isEmpty()) { 15691 cfTimeRangeBuilder_.dispose(); 15692 cfTimeRangeBuilder_ = null; 15693 cfTimeRange_ = other.cfTimeRange_; 15694 bitField0_ = (bitField0_ & ~0x00040000); 15695 cfTimeRangeBuilder_ = 15696 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 15697 getCfTimeRangeFieldBuilder() : null; 15698 } else { 15699 cfTimeRangeBuilder_.addAllMessages(other.cfTimeRange_); 15700 } 15701 } 15702 } 15703 this.mergeUnknownFields(other.getUnknownFields()); 15704 return this; 15705 } 15706 isInitialized()15707 public final boolean isInitialized() { 15708 for (int i = 0; i < getColumnCount(); i++) { 15709 if (!getColumn(i).isInitialized()) { 15710 15711 return false; 15712 } 15713 } 15714 for (int i = 0; i < getAttributeCount(); i++) { 15715 if (!getAttribute(i).isInitialized()) { 15716 15717 return false; 15718 } 15719 } 15720 if (hasFilter()) { 15721 if (!getFilter().isInitialized()) { 15722 15723 return false; 15724 } 15725 } 15726 for (int i = 0; i < getCfTimeRangeCount(); i++) { 15727 if (!getCfTimeRange(i).isInitialized()) { 15728 15729 return false; 15730 } 15731 } 15732 return true; 15733 } 15734 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)15735 public Builder mergeFrom( 15736 com.google.protobuf.CodedInputStream input, 15737 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 15738 throws java.io.IOException { 15739 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan parsedMessage = null; 15740 try { 15741 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 15742 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 15743 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan) e.getUnfinishedMessage(); 15744 throw e; 15745 } finally { 15746 if (parsedMessage != null) { 15747 mergeFrom(parsedMessage); 15748 } 15749 } 15750 return this; 15751 } 15752 private int bitField0_; 15753 15754 // 
repeated .Column column = 1; 15755 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> column_ = 15756 java.util.Collections.emptyList(); ensureColumnIsMutable()15757 private void ensureColumnIsMutable() { 15758 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 15759 column_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column>(column_); 15760 bitField0_ |= 0x00000001; 15761 } 15762 } 15763 15764 private com.google.protobuf.RepeatedFieldBuilder< 15765 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> columnBuilder_; 15766 15767 /** 15768 * <code>repeated .Column column = 1;</code> 15769 */ getColumnList()15770 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> getColumnList() { 15771 if (columnBuilder_ == null) { 15772 return java.util.Collections.unmodifiableList(column_); 15773 } else { 15774 return columnBuilder_.getMessageList(); 15775 } 15776 } 15777 /** 15778 * <code>repeated .Column column = 1;</code> 15779 */ getColumnCount()15780 public int getColumnCount() { 15781 if (columnBuilder_ == null) { 15782 return column_.size(); 15783 } else { 15784 return columnBuilder_.getCount(); 15785 } 15786 } 15787 /** 15788 * <code>repeated .Column column = 1;</code> 15789 */ getColumn(int index)15790 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column getColumn(int index) { 15791 if (columnBuilder_ == null) { 15792 return column_.get(index); 15793 } else { 15794 return columnBuilder_.getMessage(index); 15795 } 15796 } 15797 /** 15798 * <code>repeated .Column column = 1;</code> 15799 */ setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value)15800 public Builder setColumn( 15801 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { 
15802 if (columnBuilder_ == null) { 15803 if (value == null) { 15804 throw new NullPointerException(); 15805 } 15806 ensureColumnIsMutable(); 15807 column_.set(index, value); 15808 onChanged(); 15809 } else { 15810 columnBuilder_.setMessage(index, value); 15811 } 15812 return this; 15813 } 15814 /** 15815 * <code>repeated .Column column = 1;</code> 15816 */ setColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue)15817 public Builder setColumn( 15818 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { 15819 if (columnBuilder_ == null) { 15820 ensureColumnIsMutable(); 15821 column_.set(index, builderForValue.build()); 15822 onChanged(); 15823 } else { 15824 columnBuilder_.setMessage(index, builderForValue.build()); 15825 } 15826 return this; 15827 } 15828 /** 15829 * <code>repeated .Column column = 1;</code> 15830 */ addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value)15831 public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { 15832 if (columnBuilder_ == null) { 15833 if (value == null) { 15834 throw new NullPointerException(); 15835 } 15836 ensureColumnIsMutable(); 15837 column_.add(value); 15838 onChanged(); 15839 } else { 15840 columnBuilder_.addMessage(value); 15841 } 15842 return this; 15843 } 15844 /** 15845 * <code>repeated .Column column = 1;</code> 15846 */ addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value)15847 public Builder addColumn( 15848 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column value) { 15849 if (columnBuilder_ == null) { 15850 if (value == null) { 15851 throw new NullPointerException(); 15852 } 15853 ensureColumnIsMutable(); 15854 column_.add(index, value); 15855 onChanged(); 15856 } else { 15857 columnBuilder_.addMessage(index, value); 15858 } 15859 return this; 15860 } 15861 /** 15862 * <code>repeated 
.Column column = 1;</code> 15863 */ addColumn( org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue)15864 public Builder addColumn( 15865 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { 15866 if (columnBuilder_ == null) { 15867 ensureColumnIsMutable(); 15868 column_.add(builderForValue.build()); 15869 onChanged(); 15870 } else { 15871 columnBuilder_.addMessage(builderForValue.build()); 15872 } 15873 return this; 15874 } 15875 /** 15876 * <code>repeated .Column column = 1;</code> 15877 */ addColumn( int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue)15878 public Builder addColumn( 15879 int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder builderForValue) { 15880 if (columnBuilder_ == null) { 15881 ensureColumnIsMutable(); 15882 column_.add(index, builderForValue.build()); 15883 onChanged(); 15884 } else { 15885 columnBuilder_.addMessage(index, builderForValue.build()); 15886 } 15887 return this; 15888 } 15889 /** 15890 * <code>repeated .Column column = 1;</code> 15891 */ addAllColumn( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values)15892 public Builder addAllColumn( 15893 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column> values) { 15894 if (columnBuilder_ == null) { 15895 ensureColumnIsMutable(); 15896 super.addAll(values, column_); 15897 onChanged(); 15898 } else { 15899 columnBuilder_.addAllMessages(values); 15900 } 15901 return this; 15902 } 15903 /** 15904 * <code>repeated .Column column = 1;</code> 15905 */ clearColumn()15906 public Builder clearColumn() { 15907 if (columnBuilder_ == null) { 15908 column_ = java.util.Collections.emptyList(); 15909 bitField0_ = (bitField0_ & ~0x00000001); 15910 onChanged(); 15911 } else { 15912 columnBuilder_.clear(); 15913 } 15914 return this; 15915 } 15916 /** 15917 * <code>repeated .Column column = 1;</code> 15918 */ removeColumn(int index)15919 public Builder removeColumn(int index) { 15920 if (columnBuilder_ == null) { 15921 ensureColumnIsMutable(); 15922 column_.remove(index); 15923 onChanged(); 15924 } else { 15925 columnBuilder_.remove(index); 15926 } 15927 return this; 15928 } 15929 /** 15930 * <code>repeated .Column column = 1;</code> 15931 */ getColumnBuilder( int index)15932 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder getColumnBuilder( 15933 int index) { 15934 return getColumnFieldBuilder().getBuilder(index); 15935 } 15936 /** 15937 * <code>repeated .Column column = 1;</code> 15938 */ getColumnOrBuilder( int index)15939 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder getColumnOrBuilder( 15940 int index) { 15941 if (columnBuilder_ == null) { 15942 return column_.get(index); } else { 15943 return columnBuilder_.getMessageOrBuilder(index); 15944 } 15945 } 15946 /** 15947 * <code>repeated .Column column = 1;</code> 15948 */ 15949 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnOrBuilderList()15950 getColumnOrBuilderList() { 15951 if (columnBuilder_ != null) { 15952 return columnBuilder_.getMessageOrBuilderList(); 15953 } else { 15954 return java.util.Collections.unmodifiableList(column_); 15955 } 15956 } 15957 /** 15958 * <code>repeated .Column column = 1;</code> 15959 */ addColumnBuilder()15960 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder() { 15961 return getColumnFieldBuilder().addBuilder( 15962 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); 15963 } 15964 /** 15965 * <code>repeated .Column column = 1;</code> 15966 */ addColumnBuilder( int index)15967 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder addColumnBuilder( 15968 int index) { 15969 return getColumnFieldBuilder().addBuilder( 15970 index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.getDefaultInstance()); 15971 } 15972 /** 15973 * <code>repeated .Column column = 1;</code> 15974 */ 15975 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder> getColumnBuilderList()15976 getColumnBuilderList() { 15977 return getColumnFieldBuilder().getBuilderList(); 15978 } 15979 private com.google.protobuf.RepeatedFieldBuilder< 15980 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder> getColumnFieldBuilder()15981 getColumnFieldBuilder() { 15982 if (columnBuilder_ == null) { 15983 columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 15984 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ColumnOrBuilder>( 15985 column_, 15986 
((bitField0_ & 0x00000001) == 0x00000001), 15987 getParentForChildren(), 15988 isClean()); 15989 column_ = null; 15990 } 15991 return columnBuilder_; 15992 } 15993 15994 // repeated .NameBytesPair attribute = 2; 15995 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> attribute_ = 15996 java.util.Collections.emptyList(); ensureAttributeIsMutable()15997 private void ensureAttributeIsMutable() { 15998 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 15999 attribute_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(attribute_); 16000 bitField0_ |= 0x00000002; 16001 } 16002 } 16003 16004 private com.google.protobuf.RepeatedFieldBuilder< 16005 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> attributeBuilder_; 16006 16007 /** 16008 * <code>repeated .NameBytesPair attribute = 2;</code> 16009 */ getAttributeList()16010 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getAttributeList() { 16011 if (attributeBuilder_ == null) { 16012 return java.util.Collections.unmodifiableList(attribute_); 16013 } else { 16014 return attributeBuilder_.getMessageList(); 16015 } 16016 } 16017 /** 16018 * <code>repeated .NameBytesPair attribute = 2;</code> 16019 */ getAttributeCount()16020 public int getAttributeCount() { 16021 if (attributeBuilder_ == null) { 16022 return attribute_.size(); 16023 } else { 16024 return attributeBuilder_.getCount(); 16025 } 16026 } 16027 /** 16028 * <code>repeated .NameBytesPair attribute = 2;</code> 16029 */ getAttribute(int index)16030 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getAttribute(int index) { 16031 if (attributeBuilder_ == null) { 16032 return attribute_.get(index); 16033 } else { 16034 return 
attributeBuilder_.getMessage(index); 16035 } 16036 } 16037 /** 16038 * <code>repeated .NameBytesPair attribute = 2;</code> 16039 */ setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)16040 public Builder setAttribute( 16041 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 16042 if (attributeBuilder_ == null) { 16043 if (value == null) { 16044 throw new NullPointerException(); 16045 } 16046 ensureAttributeIsMutable(); 16047 attribute_.set(index, value); 16048 onChanged(); 16049 } else { 16050 attributeBuilder_.setMessage(index, value); 16051 } 16052 return this; 16053 } 16054 /** 16055 * <code>repeated .NameBytesPair attribute = 2;</code> 16056 */ setAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)16057 public Builder setAttribute( 16058 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 16059 if (attributeBuilder_ == null) { 16060 ensureAttributeIsMutable(); 16061 attribute_.set(index, builderForValue.build()); 16062 onChanged(); 16063 } else { 16064 attributeBuilder_.setMessage(index, builderForValue.build()); 16065 } 16066 return this; 16067 } 16068 /** 16069 * <code>repeated .NameBytesPair attribute = 2;</code> 16070 */ addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)16071 public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 16072 if (attributeBuilder_ == null) { 16073 if (value == null) { 16074 throw new NullPointerException(); 16075 } 16076 ensureAttributeIsMutable(); 16077 attribute_.add(value); 16078 onChanged(); 16079 } else { 16080 attributeBuilder_.addMessage(value); 16081 } 16082 return this; 16083 } 16084 /** 16085 * <code>repeated .NameBytesPair attribute = 2;</code> 16086 */ addAttribute( int index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value)16087 public Builder addAttribute( 16088 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) { 16089 if (attributeBuilder_ == null) { 16090 if (value == null) { 16091 throw new NullPointerException(); 16092 } 16093 ensureAttributeIsMutable(); 16094 attribute_.add(index, value); 16095 onChanged(); 16096 } else { 16097 attributeBuilder_.addMessage(index, value); 16098 } 16099 return this; 16100 } 16101 /** 16102 * <code>repeated .NameBytesPair attribute = 2;</code> 16103 */ addAttribute( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)16104 public Builder addAttribute( 16105 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 16106 if (attributeBuilder_ == null) { 16107 ensureAttributeIsMutable(); 16108 attribute_.add(builderForValue.build()); 16109 onChanged(); 16110 } else { 16111 attributeBuilder_.addMessage(builderForValue.build()); 16112 } 16113 return this; 16114 } 16115 /** 16116 * <code>repeated .NameBytesPair attribute = 2;</code> 16117 */ addAttribute( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue)16118 public Builder addAttribute( 16119 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) { 16120 if (attributeBuilder_ == null) { 16121 ensureAttributeIsMutable(); 16122 attribute_.add(index, builderForValue.build()); 16123 onChanged(); 16124 } else { 16125 attributeBuilder_.addMessage(index, builderForValue.build()); 16126 } 16127 return this; 16128 } 16129 /** 16130 * <code>repeated .NameBytesPair attribute = 2;</code> 16131 */ addAllAttribute( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values)16132 public Builder addAllAttribute( 16133 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) { 16134 if (attributeBuilder_ == null) { 16135 ensureAttributeIsMutable(); 16136 super.addAll(values, attribute_); 16137 onChanged(); 16138 } else { 16139 attributeBuilder_.addAllMessages(values); 16140 } 16141 return this; 16142 } 16143 /** 16144 * <code>repeated .NameBytesPair attribute = 2;</code> 16145 */ clearAttribute()16146 public Builder clearAttribute() { 16147 if (attributeBuilder_ == null) { 16148 attribute_ = java.util.Collections.emptyList(); 16149 bitField0_ = (bitField0_ & ~0x00000002); 16150 onChanged(); 16151 } else { 16152 attributeBuilder_.clear(); 16153 } 16154 return this; 16155 } 16156 /** 16157 * <code>repeated .NameBytesPair attribute = 2;</code> 16158 */ removeAttribute(int index)16159 public Builder removeAttribute(int index) { 16160 if (attributeBuilder_ == null) { 16161 ensureAttributeIsMutable(); 16162 attribute_.remove(index); 16163 onChanged(); 16164 } else { 16165 attributeBuilder_.remove(index); 16166 } 16167 return this; 16168 } 16169 /** 16170 * <code>repeated .NameBytesPair attribute = 2;</code> 16171 */ getAttributeBuilder( int index)16172 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getAttributeBuilder( 16173 int index) { 16174 return getAttributeFieldBuilder().getBuilder(index); 16175 } 16176 /** 16177 * <code>repeated .NameBytesPair attribute = 2;</code> 16178 */ getAttributeOrBuilder( int index)16179 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getAttributeOrBuilder( 16180 int index) { 16181 if (attributeBuilder_ == null) { 16182 return attribute_.get(index); } else { 16183 return attributeBuilder_.getMessageOrBuilder(index); 16184 } 16185 } 16186 /** 16187 * <code>repeated .NameBytesPair attribute = 2;</code> 16188 */ 16189 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeOrBuilderList()16190 getAttributeOrBuilderList() { 16191 if (attributeBuilder_ != null) { 16192 return attributeBuilder_.getMessageOrBuilderList(); 16193 } else { 16194 return java.util.Collections.unmodifiableList(attribute_); 16195 } 16196 } 16197 /** 16198 * <code>repeated .NameBytesPair attribute = 2;</code> 16199 */ addAttributeBuilder()16200 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder() { 16201 return getAttributeFieldBuilder().addBuilder( 16202 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); 16203 } 16204 /** 16205 * <code>repeated .NameBytesPair attribute = 2;</code> 16206 */ addAttributeBuilder( int index)16207 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addAttributeBuilder( 16208 int index) { 16209 return getAttributeFieldBuilder().addBuilder( 16210 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()); 16211 } 16212 /** 16213 * <code>repeated .NameBytesPair attribute = 2;</code> 16214 */ 16215 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder> getAttributeBuilderList()16216 getAttributeBuilderList() { 16217 return getAttributeFieldBuilder().getBuilderList(); 16218 } 16219 private com.google.protobuf.RepeatedFieldBuilder< 16220 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> getAttributeFieldBuilder()16221 getAttributeFieldBuilder() { 16222 if (attributeBuilder_ == null) { 16223 attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 16224 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>( 16225 attribute_, 16226 ((bitField0_ & 0x00000002) == 0x00000002), 16227 getParentForChildren(), 16228 isClean()); 16229 attribute_ = null; 16230 } 16231 return attributeBuilder_; 16232 } 16233 16234 // optional bytes start_row = 3; 16235 private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; 16236 /** 16237 * <code>optional bytes start_row = 3;</code> 16238 */ hasStartRow()16239 public boolean hasStartRow() { 16240 return ((bitField0_ & 0x00000004) == 0x00000004); 16241 } 16242 /** 16243 * <code>optional bytes start_row = 3;</code> 16244 */ getStartRow()16245 public com.google.protobuf.ByteString getStartRow() { 16246 return startRow_; 16247 } 16248 /** 16249 * <code>optional bytes start_row = 3;</code> 16250 */ setStartRow(com.google.protobuf.ByteString value)16251 public Builder setStartRow(com.google.protobuf.ByteString value) { 16252 if (value == null) { 16253 throw new NullPointerException(); 16254 } 16255 bitField0_ |= 0x00000004; 16256 startRow_ = value; 16257 onChanged(); 16258 return this; 16259 } 16260 /** 16261 * <code>optional bytes start_row = 3;</code> 16262 */ clearStartRow()16263 public Builder clearStartRow() { 16264 bitField0_ = (bitField0_ & ~0x00000004); 16265 startRow_ = getDefaultInstance().getStartRow(); 16266 onChanged(); 16267 return this; 16268 } 16269 16270 // optional bytes stop_row = 4; 16271 private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; 16272 /** 16273 * <code>optional bytes stop_row = 4;</code> 16274 */ hasStopRow()16275 public boolean hasStopRow() { 16276 return ((bitField0_ & 0x00000008) == 0x00000008); 16277 } 16278 /** 16279 * <code>optional bytes stop_row = 4;</code> 16280 */ getStopRow()16281 public com.google.protobuf.ByteString getStopRow() { 16282 return stopRow_; 16283 } 16284 /** 16285 * 
<code>optional bytes stop_row = 4;</code> 16286 */ setStopRow(com.google.protobuf.ByteString value)16287 public Builder setStopRow(com.google.protobuf.ByteString value) { 16288 if (value == null) { 16289 throw new NullPointerException(); 16290 } 16291 bitField0_ |= 0x00000008; 16292 stopRow_ = value; 16293 onChanged(); 16294 return this; 16295 } 16296 /** 16297 * <code>optional bytes stop_row = 4;</code> 16298 */ clearStopRow()16299 public Builder clearStopRow() { 16300 bitField0_ = (bitField0_ & ~0x00000008); 16301 stopRow_ = getDefaultInstance().getStopRow(); 16302 onChanged(); 16303 return this; 16304 } 16305 16306 // optional .Filter filter = 5; 16307 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 16308 private com.google.protobuf.SingleFieldBuilder< 16309 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; 16310 /** 16311 * <code>optional .Filter filter = 5;</code> 16312 */ hasFilter()16313 public boolean hasFilter() { 16314 return ((bitField0_ & 0x00000010) == 0x00000010); 16315 } 16316 /** 16317 * <code>optional .Filter filter = 5;</code> 16318 */ getFilter()16319 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { 16320 if (filterBuilder_ == null) { 16321 return filter_; 16322 } else { 16323 return filterBuilder_.getMessage(); 16324 } 16325 } 16326 /** 16327 * <code>optional .Filter filter = 5;</code> 16328 */ setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)16329 public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 16330 if (filterBuilder_ == null) { 16331 if (value == null) { 16332 throw new NullPointerException(); 16333 } 16334 filter_ = value; 16335 onChanged(); 
16336 } else { 16337 filterBuilder_.setMessage(value); 16338 } 16339 bitField0_ |= 0x00000010; 16340 return this; 16341 } 16342 /** 16343 * <code>optional .Filter filter = 5;</code> 16344 */ setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)16345 public Builder setFilter( 16346 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { 16347 if (filterBuilder_ == null) { 16348 filter_ = builderForValue.build(); 16349 onChanged(); 16350 } else { 16351 filterBuilder_.setMessage(builderForValue.build()); 16352 } 16353 bitField0_ |= 0x00000010; 16354 return this; 16355 } 16356 /** 16357 * <code>optional .Filter filter = 5;</code> 16358 */ mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)16359 public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 16360 if (filterBuilder_ == null) { 16361 if (((bitField0_ & 0x00000010) == 0x00000010) && 16362 filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { 16363 filter_ = 16364 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); 16365 } else { 16366 filter_ = value; 16367 } 16368 onChanged(); 16369 } else { 16370 filterBuilder_.mergeFrom(value); 16371 } 16372 bitField0_ |= 0x00000010; 16373 return this; 16374 } 16375 /** 16376 * <code>optional .Filter filter = 5;</code> 16377 */ clearFilter()16378 public Builder clearFilter() { 16379 if (filterBuilder_ == null) { 16380 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 16381 onChanged(); 16382 } else { 16383 filterBuilder_.clear(); 16384 } 16385 bitField0_ = (bitField0_ & ~0x00000010); 16386 return this; 16387 } 16388 /** 16389 * <code>optional .Filter filter = 5;</code> 16390 */ getFilterBuilder()16391 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder 
getFilterBuilder() { 16392 bitField0_ |= 0x00000010; 16393 onChanged(); 16394 return getFilterFieldBuilder().getBuilder(); 16395 } 16396 /** 16397 * <code>optional .Filter filter = 5;</code> 16398 */ getFilterOrBuilder()16399 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { 16400 if (filterBuilder_ != null) { 16401 return filterBuilder_.getMessageOrBuilder(); 16402 } else { 16403 return filter_; 16404 } 16405 } 16406 /** 16407 * <code>optional .Filter filter = 5;</code> 16408 */ 16409 private com.google.protobuf.SingleFieldBuilder< 16410 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder()16411 getFilterFieldBuilder() { 16412 if (filterBuilder_ == null) { 16413 filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< 16414 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( 16415 filter_, 16416 getParentForChildren(), 16417 isClean()); 16418 filter_ = null; 16419 } 16420 return filterBuilder_; 16421 } 16422 16423 // optional .TimeRange time_range = 6; 16424 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 16425 private com.google.protobuf.SingleFieldBuilder< 16426 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; 16427 /** 16428 * <code>optional .TimeRange time_range = 6;</code> 16429 */ hasTimeRange()16430 public boolean hasTimeRange() { 16431 return ((bitField0_ & 
0x00000020) == 0x00000020); 16432 } 16433 /** 16434 * <code>optional .TimeRange time_range = 6;</code> 16435 */ getTimeRange()16436 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 16437 if (timeRangeBuilder_ == null) { 16438 return timeRange_; 16439 } else { 16440 return timeRangeBuilder_.getMessage(); 16441 } 16442 } 16443 /** 16444 * <code>optional .TimeRange time_range = 6;</code> 16445 */ setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)16446 public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 16447 if (timeRangeBuilder_ == null) { 16448 if (value == null) { 16449 throw new NullPointerException(); 16450 } 16451 timeRange_ = value; 16452 onChanged(); 16453 } else { 16454 timeRangeBuilder_.setMessage(value); 16455 } 16456 bitField0_ |= 0x00000020; 16457 return this; 16458 } 16459 /** 16460 * <code>optional .TimeRange time_range = 6;</code> 16461 */ setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue)16462 public Builder setTimeRange( 16463 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { 16464 if (timeRangeBuilder_ == null) { 16465 timeRange_ = builderForValue.build(); 16466 onChanged(); 16467 } else { 16468 timeRangeBuilder_.setMessage(builderForValue.build()); 16469 } 16470 bitField0_ |= 0x00000020; 16471 return this; 16472 } 16473 /** 16474 * <code>optional .TimeRange time_range = 6;</code> 16475 */ mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)16476 public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 16477 if (timeRangeBuilder_ == null) { 16478 if (((bitField0_ & 0x00000020) == 0x00000020) && 16479 timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { 16480 timeRange_ = 16481 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); 16482 } else { 16483 timeRange_ = value; 16484 } 16485 onChanged(); 16486 } else { 16487 timeRangeBuilder_.mergeFrom(value); 16488 } 16489 bitField0_ |= 0x00000020; 16490 return this; 16491 } 16492 /** 16493 * <code>optional .TimeRange time_range = 6;</code> 16494 */ clearTimeRange()16495 public Builder clearTimeRange() { 16496 if (timeRangeBuilder_ == null) { 16497 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 16498 onChanged(); 16499 } else { 16500 timeRangeBuilder_.clear(); 16501 } 16502 bitField0_ = (bitField0_ & ~0x00000020); 16503 return this; 16504 } 16505 /** 16506 * <code>optional .TimeRange time_range = 6;</code> 16507 */ getTimeRangeBuilder()16508 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { 16509 bitField0_ |= 0x00000020; 16510 onChanged(); 16511 return getTimeRangeFieldBuilder().getBuilder(); 16512 } 16513 /** 16514 * <code>optional .TimeRange time_range = 6;</code> 16515 */ getTimeRangeOrBuilder()16516 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 16517 if (timeRangeBuilder_ != null) { 16518 return timeRangeBuilder_.getMessageOrBuilder(); 16519 } else { 16520 return timeRange_; 16521 } 16522 } 16523 /** 16524 * <code>optional .TimeRange time_range = 6;</code> 16525 */ 16526 private com.google.protobuf.SingleFieldBuilder< 16527 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder()16528 getTimeRangeFieldBuilder() { 16529 if (timeRangeBuilder_ == null) { 16530 timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< 16531 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( 16532 timeRange_, 16533 getParentForChildren(), 16534 isClean()); 16535 timeRange_ = null; 16536 } 16537 return timeRangeBuilder_; 16538 } 16539 16540 // optional uint32 max_versions = 7 [default = 1]; 16541 private int maxVersions_ = 1; 16542 /** 16543 * <code>optional uint32 max_versions = 7 [default = 1];</code> 16544 */ hasMaxVersions()16545 public boolean hasMaxVersions() { 16546 return ((bitField0_ & 0x00000040) == 0x00000040); 16547 } 16548 /** 16549 * <code>optional uint32 max_versions = 7 [default = 1];</code> 16550 */ getMaxVersions()16551 public int getMaxVersions() { 16552 return maxVersions_; 16553 } 16554 /** 16555 * <code>optional uint32 max_versions = 7 [default = 1];</code> 16556 */ setMaxVersions(int value)16557 public Builder setMaxVersions(int value) { 16558 bitField0_ |= 0x00000040; 16559 maxVersions_ = value; 16560 onChanged(); 16561 return this; 16562 } 16563 /** 16564 * <code>optional uint32 max_versions = 7 [default = 1];</code> 16565 */ clearMaxVersions()16566 public Builder clearMaxVersions() { 16567 bitField0_ = (bitField0_ & ~0x00000040); 16568 maxVersions_ = 1; 16569 onChanged(); 16570 return this; 16571 } 16572 16573 // optional bool cache_blocks = 8 [default = true]; 16574 private boolean cacheBlocks_ = true; 16575 /** 16576 * <code>optional bool cache_blocks = 8 [default = true];</code> 16577 */ hasCacheBlocks()16578 public boolean hasCacheBlocks() { 16579 return ((bitField0_ & 0x00000080) == 0x00000080); 16580 } 16581 /** 16582 * <code>optional bool cache_blocks = 8 [default = true];</code> 16583 */ getCacheBlocks()16584 public boolean getCacheBlocks() { 16585 return cacheBlocks_; 16586 } 16587 /** 16588 * <code>optional bool cache_blocks = 8 [default = true];</code> 16589 */ setCacheBlocks(boolean value)16590 public 
Builder setCacheBlocks(boolean value) { 16591 bitField0_ |= 0x00000080; 16592 cacheBlocks_ = value; 16593 onChanged(); 16594 return this; 16595 } 16596 /** 16597 * <code>optional bool cache_blocks = 8 [default = true];</code> 16598 */ clearCacheBlocks()16599 public Builder clearCacheBlocks() { 16600 bitField0_ = (bitField0_ & ~0x00000080); 16601 cacheBlocks_ = true; 16602 onChanged(); 16603 return this; 16604 } 16605 16606 // optional uint32 batch_size = 9; 16607 private int batchSize_ ; 16608 /** 16609 * <code>optional uint32 batch_size = 9;</code> 16610 */ hasBatchSize()16611 public boolean hasBatchSize() { 16612 return ((bitField0_ & 0x00000100) == 0x00000100); 16613 } 16614 /** 16615 * <code>optional uint32 batch_size = 9;</code> 16616 */ getBatchSize()16617 public int getBatchSize() { 16618 return batchSize_; 16619 } 16620 /** 16621 * <code>optional uint32 batch_size = 9;</code> 16622 */ setBatchSize(int value)16623 public Builder setBatchSize(int value) { 16624 bitField0_ |= 0x00000100; 16625 batchSize_ = value; 16626 onChanged(); 16627 return this; 16628 } 16629 /** 16630 * <code>optional uint32 batch_size = 9;</code> 16631 */ clearBatchSize()16632 public Builder clearBatchSize() { 16633 bitField0_ = (bitField0_ & ~0x00000100); 16634 batchSize_ = 0; 16635 onChanged(); 16636 return this; 16637 } 16638 16639 // optional uint64 max_result_size = 10; 16640 private long maxResultSize_ ; 16641 /** 16642 * <code>optional uint64 max_result_size = 10;</code> 16643 */ hasMaxResultSize()16644 public boolean hasMaxResultSize() { 16645 return ((bitField0_ & 0x00000200) == 0x00000200); 16646 } 16647 /** 16648 * <code>optional uint64 max_result_size = 10;</code> 16649 */ getMaxResultSize()16650 public long getMaxResultSize() { 16651 return maxResultSize_; 16652 } 16653 /** 16654 * <code>optional uint64 max_result_size = 10;</code> 16655 */ setMaxResultSize(long value)16656 public Builder setMaxResultSize(long value) { 16657 bitField0_ |= 0x00000200; 16658 maxResultSize_ = 
value; 16659 onChanged(); 16660 return this; 16661 } 16662 /** 16663 * <code>optional uint64 max_result_size = 10;</code> 16664 */ clearMaxResultSize()16665 public Builder clearMaxResultSize() { 16666 bitField0_ = (bitField0_ & ~0x00000200); 16667 maxResultSize_ = 0L; 16668 onChanged(); 16669 return this; 16670 } 16671 16672 // optional uint32 store_limit = 11; 16673 private int storeLimit_ ; 16674 /** 16675 * <code>optional uint32 store_limit = 11;</code> 16676 */ hasStoreLimit()16677 public boolean hasStoreLimit() { 16678 return ((bitField0_ & 0x00000400) == 0x00000400); 16679 } 16680 /** 16681 * <code>optional uint32 store_limit = 11;</code> 16682 */ getStoreLimit()16683 public int getStoreLimit() { 16684 return storeLimit_; 16685 } 16686 /** 16687 * <code>optional uint32 store_limit = 11;</code> 16688 */ setStoreLimit(int value)16689 public Builder setStoreLimit(int value) { 16690 bitField0_ |= 0x00000400; 16691 storeLimit_ = value; 16692 onChanged(); 16693 return this; 16694 } 16695 /** 16696 * <code>optional uint32 store_limit = 11;</code> 16697 */ clearStoreLimit()16698 public Builder clearStoreLimit() { 16699 bitField0_ = (bitField0_ & ~0x00000400); 16700 storeLimit_ = 0; 16701 onChanged(); 16702 return this; 16703 } 16704 16705 // optional uint32 store_offset = 12; 16706 private int storeOffset_ ; 16707 /** 16708 * <code>optional uint32 store_offset = 12;</code> 16709 */ hasStoreOffset()16710 public boolean hasStoreOffset() { 16711 return ((bitField0_ & 0x00000800) == 0x00000800); 16712 } 16713 /** 16714 * <code>optional uint32 store_offset = 12;</code> 16715 */ getStoreOffset()16716 public int getStoreOffset() { 16717 return storeOffset_; 16718 } 16719 /** 16720 * <code>optional uint32 store_offset = 12;</code> 16721 */ setStoreOffset(int value)16722 public Builder setStoreOffset(int value) { 16723 bitField0_ |= 0x00000800; 16724 storeOffset_ = value; 16725 onChanged(); 16726 return this; 16727 } 16728 /** 16729 * <code>optional uint32 store_offset = 
12;</code> 16730 */ clearStoreOffset()16731 public Builder clearStoreOffset() { 16732 bitField0_ = (bitField0_ & ~0x00000800); 16733 storeOffset_ = 0; 16734 onChanged(); 16735 return this; 16736 } 16737 16738 // optional bool load_column_families_on_demand = 13; 16739 private boolean loadColumnFamiliesOnDemand_ ; 16740 /** 16741 * <code>optional bool load_column_families_on_demand = 13;</code> 16742 * 16743 * <pre> 16744 * DO NOT add defaults to load_column_families_on_demand. 16745 * </pre> 16746 */ hasLoadColumnFamiliesOnDemand()16747 public boolean hasLoadColumnFamiliesOnDemand() { 16748 return ((bitField0_ & 0x00001000) == 0x00001000); 16749 } 16750 /** 16751 * <code>optional bool load_column_families_on_demand = 13;</code> 16752 * 16753 * <pre> 16754 * DO NOT add defaults to load_column_families_on_demand. 16755 * </pre> 16756 */ getLoadColumnFamiliesOnDemand()16757 public boolean getLoadColumnFamiliesOnDemand() { 16758 return loadColumnFamiliesOnDemand_; 16759 } 16760 /** 16761 * <code>optional bool load_column_families_on_demand = 13;</code> 16762 * 16763 * <pre> 16764 * DO NOT add defaults to load_column_families_on_demand. 16765 * </pre> 16766 */ setLoadColumnFamiliesOnDemand(boolean value)16767 public Builder setLoadColumnFamiliesOnDemand(boolean value) { 16768 bitField0_ |= 0x00001000; 16769 loadColumnFamiliesOnDemand_ = value; 16770 onChanged(); 16771 return this; 16772 } 16773 /** 16774 * <code>optional bool load_column_families_on_demand = 13;</code> 16775 * 16776 * <pre> 16777 * DO NOT add defaults to load_column_families_on_demand. 
16778 * </pre> 16779 */ clearLoadColumnFamiliesOnDemand()16780 public Builder clearLoadColumnFamiliesOnDemand() { 16781 bitField0_ = (bitField0_ & ~0x00001000); 16782 loadColumnFamiliesOnDemand_ = false; 16783 onChanged(); 16784 return this; 16785 } 16786 16787 // optional bool small = 14; 16788 private boolean small_ ; 16789 /** 16790 * <code>optional bool small = 14;</code> 16791 */ hasSmall()16792 public boolean hasSmall() { 16793 return ((bitField0_ & 0x00002000) == 0x00002000); 16794 } 16795 /** 16796 * <code>optional bool small = 14;</code> 16797 */ getSmall()16798 public boolean getSmall() { 16799 return small_; 16800 } 16801 /** 16802 * <code>optional bool small = 14;</code> 16803 */ setSmall(boolean value)16804 public Builder setSmall(boolean value) { 16805 bitField0_ |= 0x00002000; 16806 small_ = value; 16807 onChanged(); 16808 return this; 16809 } 16810 /** 16811 * <code>optional bool small = 14;</code> 16812 */ clearSmall()16813 public Builder clearSmall() { 16814 bitField0_ = (bitField0_ & ~0x00002000); 16815 small_ = false; 16816 onChanged(); 16817 return this; 16818 } 16819 16820 // optional bool reversed = 15 [default = false]; 16821 private boolean reversed_ ; 16822 /** 16823 * <code>optional bool reversed = 15 [default = false];</code> 16824 */ hasReversed()16825 public boolean hasReversed() { 16826 return ((bitField0_ & 0x00004000) == 0x00004000); 16827 } 16828 /** 16829 * <code>optional bool reversed = 15 [default = false];</code> 16830 */ getReversed()16831 public boolean getReversed() { 16832 return reversed_; 16833 } 16834 /** 16835 * <code>optional bool reversed = 15 [default = false];</code> 16836 */ setReversed(boolean value)16837 public Builder setReversed(boolean value) { 16838 bitField0_ |= 0x00004000; 16839 reversed_ = value; 16840 onChanged(); 16841 return this; 16842 } 16843 /** 16844 * <code>optional bool reversed = 15 [default = false];</code> 16845 */ clearReversed()16846 public Builder clearReversed() { 16847 bitField0_ = 
(bitField0_ & ~0x00004000); 16848 reversed_ = false; 16849 onChanged(); 16850 return this; 16851 } 16852 16853 // optional .Consistency consistency = 16 [default = STRONG]; 16854 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 16855 /** 16856 * <code>optional .Consistency consistency = 16 [default = STRONG];</code> 16857 */ hasConsistency()16858 public boolean hasConsistency() { 16859 return ((bitField0_ & 0x00008000) == 0x00008000); 16860 } 16861 /** 16862 * <code>optional .Consistency consistency = 16 [default = STRONG];</code> 16863 */ getConsistency()16864 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency getConsistency() { 16865 return consistency_; 16866 } 16867 /** 16868 * <code>optional .Consistency consistency = 16 [default = STRONG];</code> 16869 */ setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value)16870 public Builder setConsistency(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency value) { 16871 if (value == null) { 16872 throw new NullPointerException(); 16873 } 16874 bitField0_ |= 0x00008000; 16875 consistency_ = value; 16876 onChanged(); 16877 return this; 16878 } 16879 /** 16880 * <code>optional .Consistency consistency = 16 [default = STRONG];</code> 16881 */ clearConsistency()16882 public Builder clearConsistency() { 16883 bitField0_ = (bitField0_ & ~0x00008000); 16884 consistency_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Consistency.STRONG; 16885 onChanged(); 16886 return this; 16887 } 16888 16889 // optional uint32 caching = 17; 16890 private int caching_ ; 16891 /** 16892 * <code>optional uint32 caching = 17;</code> 16893 */ hasCaching()16894 public boolean hasCaching() { 16895 return ((bitField0_ & 0x00010000) == 0x00010000); 16896 } 16897 /** 16898 * <code>optional uint32 caching = 17;</code> 16899 */ getCaching()16900 public 
int getCaching() { 16901 return caching_; 16902 } 16903 /** 16904 * <code>optional uint32 caching = 17;</code> 16905 */ setCaching(int value)16906 public Builder setCaching(int value) { 16907 bitField0_ |= 0x00010000; 16908 caching_ = value; 16909 onChanged(); 16910 return this; 16911 } 16912 /** 16913 * <code>optional uint32 caching = 17;</code> 16914 */ clearCaching()16915 public Builder clearCaching() { 16916 bitField0_ = (bitField0_ & ~0x00010000); 16917 caching_ = 0; 16918 onChanged(); 16919 return this; 16920 } 16921 16922 // optional bool allow_partial_results = 18; 16923 private boolean allowPartialResults_ ; 16924 /** 16925 * <code>optional bool allow_partial_results = 18;</code> 16926 */ hasAllowPartialResults()16927 public boolean hasAllowPartialResults() { 16928 return ((bitField0_ & 0x00020000) == 0x00020000); 16929 } 16930 /** 16931 * <code>optional bool allow_partial_results = 18;</code> 16932 */ getAllowPartialResults()16933 public boolean getAllowPartialResults() { 16934 return allowPartialResults_; 16935 } 16936 /** 16937 * <code>optional bool allow_partial_results = 18;</code> 16938 */ setAllowPartialResults(boolean value)16939 public Builder setAllowPartialResults(boolean value) { 16940 bitField0_ |= 0x00020000; 16941 allowPartialResults_ = value; 16942 onChanged(); 16943 return this; 16944 } 16945 /** 16946 * <code>optional bool allow_partial_results = 18;</code> 16947 */ clearAllowPartialResults()16948 public Builder clearAllowPartialResults() { 16949 bitField0_ = (bitField0_ & ~0x00020000); 16950 allowPartialResults_ = false; 16951 onChanged(); 16952 return this; 16953 } 16954 16955 // repeated .ColumnFamilyTimeRange cf_time_range = 19; 16956 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> cfTimeRange_ = 16957 java.util.Collections.emptyList(); ensureCfTimeRangeIsMutable()16958 private void ensureCfTimeRangeIsMutable() { 16959 if (!((bitField0_ & 0x00040000) == 0x00040000)) { 16960 
cfTimeRange_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange>(cfTimeRange_); 16961 bitField0_ |= 0x00040000; 16962 } 16963 } 16964 16965 private com.google.protobuf.RepeatedFieldBuilder< 16966 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> cfTimeRangeBuilder_; 16967 16968 /** 16969 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 16970 */ getCfTimeRangeList()16971 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> getCfTimeRangeList() { 16972 if (cfTimeRangeBuilder_ == null) { 16973 return java.util.Collections.unmodifiableList(cfTimeRange_); 16974 } else { 16975 return cfTimeRangeBuilder_.getMessageList(); 16976 } 16977 } 16978 /** 16979 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 16980 */ getCfTimeRangeCount()16981 public int getCfTimeRangeCount() { 16982 if (cfTimeRangeBuilder_ == null) { 16983 return cfTimeRange_.size(); 16984 } else { 16985 return cfTimeRangeBuilder_.getCount(); 16986 } 16987 } 16988 /** 16989 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 16990 */ getCfTimeRange(int index)16991 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getCfTimeRange(int index) { 16992 if (cfTimeRangeBuilder_ == null) { 16993 return cfTimeRange_.get(index); 16994 } else { 16995 return cfTimeRangeBuilder_.getMessage(index); 16996 } 16997 } 16998 /** 16999 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17000 */ setCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value)17001 public Builder setCfTimeRange( 17002 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) 
{ 17003 if (cfTimeRangeBuilder_ == null) { 17004 if (value == null) { 17005 throw new NullPointerException(); 17006 } 17007 ensureCfTimeRangeIsMutable(); 17008 cfTimeRange_.set(index, value); 17009 onChanged(); 17010 } else { 17011 cfTimeRangeBuilder_.setMessage(index, value); 17012 } 17013 return this; 17014 } 17015 /** 17016 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17017 */ setCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue)17018 public Builder setCfTimeRange( 17019 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { 17020 if (cfTimeRangeBuilder_ == null) { 17021 ensureCfTimeRangeIsMutable(); 17022 cfTimeRange_.set(index, builderForValue.build()); 17023 onChanged(); 17024 } else { 17025 cfTimeRangeBuilder_.setMessage(index, builderForValue.build()); 17026 } 17027 return this; 17028 } 17029 /** 17030 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17031 */ addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value)17032 public Builder addCfTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { 17033 if (cfTimeRangeBuilder_ == null) { 17034 if (value == null) { 17035 throw new NullPointerException(); 17036 } 17037 ensureCfTimeRangeIsMutable(); 17038 cfTimeRange_.add(value); 17039 onChanged(); 17040 } else { 17041 cfTimeRangeBuilder_.addMessage(value); 17042 } 17043 return this; 17044 } 17045 /** 17046 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17047 */ addCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value)17048 public Builder addCfTimeRange( 17049 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange value) { 17050 if (cfTimeRangeBuilder_ == null) { 17051 if (value == null) { 17052 throw new 
NullPointerException(); 17053 } 17054 ensureCfTimeRangeIsMutable(); 17055 cfTimeRange_.add(index, value); 17056 onChanged(); 17057 } else { 17058 cfTimeRangeBuilder_.addMessage(index, value); 17059 } 17060 return this; 17061 } 17062 /** 17063 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17064 */ addCfTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue)17065 public Builder addCfTimeRange( 17066 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { 17067 if (cfTimeRangeBuilder_ == null) { 17068 ensureCfTimeRangeIsMutable(); 17069 cfTimeRange_.add(builderForValue.build()); 17070 onChanged(); 17071 } else { 17072 cfTimeRangeBuilder_.addMessage(builderForValue.build()); 17073 } 17074 return this; 17075 } 17076 /** 17077 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17078 */ addCfTimeRange( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue)17079 public Builder addCfTimeRange( 17080 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder builderForValue) { 17081 if (cfTimeRangeBuilder_ == null) { 17082 ensureCfTimeRangeIsMutable(); 17083 cfTimeRange_.add(index, builderForValue.build()); 17084 onChanged(); 17085 } else { 17086 cfTimeRangeBuilder_.addMessage(index, builderForValue.build()); 17087 } 17088 return this; 17089 } 17090 /** 17091 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17092 */ addAllCfTimeRange( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values)17093 public Builder addAllCfTimeRange( 17094 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange> values) { 17095 if (cfTimeRangeBuilder_ == null) { 17096 ensureCfTimeRangeIsMutable(); 17097 super.addAll(values, cfTimeRange_); 17098 onChanged(); 17099 } else { 17100 cfTimeRangeBuilder_.addAllMessages(values); 17101 } 17102 return this; 17103 } 17104 /** 17105 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17106 */ clearCfTimeRange()17107 public Builder clearCfTimeRange() { 17108 if (cfTimeRangeBuilder_ == null) { 17109 cfTimeRange_ = java.util.Collections.emptyList(); 17110 bitField0_ = (bitField0_ & ~0x00040000); 17111 onChanged(); 17112 } else { 17113 cfTimeRangeBuilder_.clear(); 17114 } 17115 return this; 17116 } 17117 /** 17118 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17119 */ removeCfTimeRange(int index)17120 public Builder removeCfTimeRange(int index) { 17121 if (cfTimeRangeBuilder_ == null) { 17122 ensureCfTimeRangeIsMutable(); 17123 cfTimeRange_.remove(index); 17124 onChanged(); 17125 } else { 17126 cfTimeRangeBuilder_.remove(index); 17127 } 17128 return this; 17129 } 17130 /** 17131 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17132 */ getCfTimeRangeBuilder( int index)17133 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder getCfTimeRangeBuilder( 17134 int index) { 17135 return getCfTimeRangeFieldBuilder().getBuilder(index); 17136 } 17137 /** 17138 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17139 */ getCfTimeRangeOrBuilder( int index)17140 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder getCfTimeRangeOrBuilder( 17141 int index) { 17142 if (cfTimeRangeBuilder_ == null) { 17143 return cfTimeRange_.get(index); } else { 17144 return cfTimeRangeBuilder_.getMessageOrBuilder(index); 17145 } 17146 } 17147 /** 17148 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17149 */ 17150 
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeOrBuilderList()17151 getCfTimeRangeOrBuilderList() { 17152 if (cfTimeRangeBuilder_ != null) { 17153 return cfTimeRangeBuilder_.getMessageOrBuilderList(); 17154 } else { 17155 return java.util.Collections.unmodifiableList(cfTimeRange_); 17156 } 17157 } 17158 /** 17159 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17160 */ addCfTimeRangeBuilder()17161 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder() { 17162 return getCfTimeRangeFieldBuilder().addBuilder( 17163 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); 17164 } 17165 /** 17166 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17167 */ addCfTimeRangeBuilder( int index)17168 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder addCfTimeRangeBuilder( 17169 int index) { 17170 return getCfTimeRangeFieldBuilder().addBuilder( 17171 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()); 17172 } 17173 /** 17174 * <code>repeated .ColumnFamilyTimeRange cf_time_range = 19;</code> 17175 */ 17176 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder> getCfTimeRangeBuilderList()17177 getCfTimeRangeBuilderList() { 17178 return getCfTimeRangeFieldBuilder().getBuilderList(); 17179 } 17180 private com.google.protobuf.RepeatedFieldBuilder< 17181 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder> getCfTimeRangeFieldBuilder()17182 getCfTimeRangeFieldBuilder() { 17183 if (cfTimeRangeBuilder_ == null) { 17184 
cfTimeRangeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 17185 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder>( 17186 cfTimeRange_, 17187 ((bitField0_ & 0x00040000) == 0x00040000), 17188 getParentForChildren(), 17189 isClean()); 17190 cfTimeRange_ = null; 17191 } 17192 return cfTimeRangeBuilder_; 17193 } 17194 17195 // @@protoc_insertion_point(builder_scope:Scan) 17196 } 17197 17198 static { 17199 defaultInstance = new Scan(true); defaultInstance.initFields()17200 defaultInstance.initFields(); 17201 } 17202 17203 // @@protoc_insertion_point(class_scope:Scan) 17204 } 17205 17206 public interface ScanRequestOrBuilder 17207 extends com.google.protobuf.MessageOrBuilder { 17208 17209 // optional .RegionSpecifier region = 1; 17210 /** 17211 * <code>optional .RegionSpecifier region = 1;</code> 17212 */ hasRegion()17213 boolean hasRegion(); 17214 /** 17215 * <code>optional .RegionSpecifier region = 1;</code> 17216 */ getRegion()17217 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); 17218 /** 17219 * <code>optional .RegionSpecifier region = 1;</code> 17220 */ getRegionOrBuilder()17221 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); 17222 17223 // optional .Scan scan = 2; 17224 /** 17225 * <code>optional .Scan scan = 2;</code> 17226 */ hasScan()17227 boolean hasScan(); 17228 /** 17229 * <code>optional .Scan scan = 2;</code> 17230 */ getScan()17231 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan(); 17232 /** 17233 * <code>optional .Scan scan = 2;</code> 17234 */ getScanOrBuilder()17235 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder(); 17236 17237 // optional uint64 scanner_id = 3; 17238 /** 17239 * <code>optional 
uint64 scanner_id = 3;</code> 17240 */ hasScannerId()17241 boolean hasScannerId(); 17242 /** 17243 * <code>optional uint64 scanner_id = 3;</code> 17244 */ getScannerId()17245 long getScannerId(); 17246 17247 // optional uint32 number_of_rows = 4; 17248 /** 17249 * <code>optional uint32 number_of_rows = 4;</code> 17250 */ hasNumberOfRows()17251 boolean hasNumberOfRows(); 17252 /** 17253 * <code>optional uint32 number_of_rows = 4;</code> 17254 */ getNumberOfRows()17255 int getNumberOfRows(); 17256 17257 // optional bool close_scanner = 5; 17258 /** 17259 * <code>optional bool close_scanner = 5;</code> 17260 */ hasCloseScanner()17261 boolean hasCloseScanner(); 17262 /** 17263 * <code>optional bool close_scanner = 5;</code> 17264 */ getCloseScanner()17265 boolean getCloseScanner(); 17266 17267 // optional uint64 next_call_seq = 6; 17268 /** 17269 * <code>optional uint64 next_call_seq = 6;</code> 17270 */ hasNextCallSeq()17271 boolean hasNextCallSeq(); 17272 /** 17273 * <code>optional uint64 next_call_seq = 6;</code> 17274 */ getNextCallSeq()17275 long getNextCallSeq(); 17276 17277 // optional bool client_handles_partials = 7; 17278 /** 17279 * <code>optional bool client_handles_partials = 7;</code> 17280 */ hasClientHandlesPartials()17281 boolean hasClientHandlesPartials(); 17282 /** 17283 * <code>optional bool client_handles_partials = 7;</code> 17284 */ getClientHandlesPartials()17285 boolean getClientHandlesPartials(); 17286 17287 // optional bool client_handles_heartbeats = 8; 17288 /** 17289 * <code>optional bool client_handles_heartbeats = 8;</code> 17290 */ hasClientHandlesHeartbeats()17291 boolean hasClientHandlesHeartbeats(); 17292 /** 17293 * <code>optional bool client_handles_heartbeats = 8;</code> 17294 */ getClientHandlesHeartbeats()17295 boolean getClientHandlesHeartbeats(); 17296 17297 // optional bool track_scan_metrics = 9; 17298 /** 17299 * <code>optional bool track_scan_metrics = 9;</code> 17300 */ hasTrackScanMetrics()17301 boolean 
hasTrackScanMetrics(); 17302 /** 17303 * <code>optional bool track_scan_metrics = 9;</code> 17304 */ getTrackScanMetrics()17305 boolean getTrackScanMetrics(); 17306 17307 // optional bool renew = 10 [default = false]; 17308 /** 17309 * <code>optional bool renew = 10 [default = false];</code> 17310 */ hasRenew()17311 boolean hasRenew(); 17312 /** 17313 * <code>optional bool renew = 10 [default = false];</code> 17314 */ getRenew()17315 boolean getRenew(); 17316 } 17317 /** 17318 * Protobuf type {@code ScanRequest} 17319 * 17320 * <pre> 17321 ** 17322 * A scan request. Initially, it should specify a scan. Later on, you 17323 * can use the scanner id returned to fetch result batches with a different 17324 * scan request. 17325 * 17326 * The scanner will remain open if there are more results, and it's not 17327 * asked to be closed explicitly. 17328 * 17329 * You can fetch the results and ask the scanner to be closed to save 17330 * a trip if you are not interested in remaining results. 17331 * </pre> 17332 */ 17333 public static final class ScanRequest extends 17334 com.google.protobuf.GeneratedMessage 17335 implements ScanRequestOrBuilder { 17336 // Use ScanRequest.newBuilder() to construct. 
ScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)17337 private ScanRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 17338 super(builder); 17339 this.unknownFields = builder.getUnknownFields(); 17340 } ScanRequest(boolean noInit)17341 private ScanRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 17342 17343 private static final ScanRequest defaultInstance; getDefaultInstance()17344 public static ScanRequest getDefaultInstance() { 17345 return defaultInstance; 17346 } 17347 getDefaultInstanceForType()17348 public ScanRequest getDefaultInstanceForType() { 17349 return defaultInstance; 17350 } 17351 17352 private final com.google.protobuf.UnknownFieldSet unknownFields; 17353 @java.lang.Override 17354 public final com.google.protobuf.UnknownFieldSet getUnknownFields()17355 getUnknownFields() { 17356 return this.unknownFields; 17357 } ScanRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)17358 private ScanRequest( 17359 com.google.protobuf.CodedInputStream input, 17360 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 17361 throws com.google.protobuf.InvalidProtocolBufferException { 17362 initFields(); 17363 int mutable_bitField0_ = 0; 17364 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 17365 com.google.protobuf.UnknownFieldSet.newBuilder(); 17366 try { 17367 boolean done = false; 17368 while (!done) { 17369 int tag = input.readTag(); 17370 switch (tag) { 17371 case 0: 17372 done = true; 17373 break; 17374 default: { 17375 if (!parseUnknownField(input, unknownFields, 17376 extensionRegistry, tag)) { 17377 done = true; 17378 } 17379 break; 17380 } 17381 case 10: { 17382 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; 17383 if (((bitField0_ & 0x00000001) == 0x00000001)) { 17384 subBuilder = region_.toBuilder(); 17385 } 17386 region_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); 17387 if (subBuilder != null) { 17388 subBuilder.mergeFrom(region_); 17389 region_ = subBuilder.buildPartial(); 17390 } 17391 bitField0_ |= 0x00000001; 17392 break; 17393 } 17394 case 18: { 17395 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; 17396 if (((bitField0_ & 0x00000002) == 0x00000002)) { 17397 subBuilder = scan_.toBuilder(); 17398 } 17399 scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); 17400 if (subBuilder != null) { 17401 subBuilder.mergeFrom(scan_); 17402 scan_ = subBuilder.buildPartial(); 17403 } 17404 bitField0_ |= 0x00000002; 17405 break; 17406 } 17407 case 24: { 17408 bitField0_ |= 0x00000004; 17409 scannerId_ = input.readUInt64(); 17410 break; 17411 } 17412 case 32: { 17413 bitField0_ |= 0x00000008; 17414 numberOfRows_ = input.readUInt32(); 17415 break; 17416 } 17417 case 40: { 17418 bitField0_ |= 0x00000010; 17419 closeScanner_ = input.readBool(); 17420 break; 17421 } 17422 case 48: { 17423 bitField0_ |= 0x00000020; 17424 nextCallSeq_ = input.readUInt64(); 17425 break; 17426 } 17427 case 56: { 17428 bitField0_ |= 0x00000040; 17429 clientHandlesPartials_ = input.readBool(); 17430 break; 17431 } 17432 case 64: { 17433 bitField0_ |= 0x00000080; 17434 clientHandlesHeartbeats_ = input.readBool(); 17435 break; 17436 } 17437 case 72: { 17438 bitField0_ |= 0x00000100; 17439 trackScanMetrics_ = input.readBool(); 17440 break; 17441 } 17442 case 80: { 17443 bitField0_ |= 0x00000200; 17444 renew_ = input.readBool(); 17445 break; 17446 } 17447 } 17448 } 17449 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 17450 throw e.setUnfinishedMessage(this); 17451 } catch (java.io.IOException e) { 17452 throw new com.google.protobuf.InvalidProtocolBufferException( 17453 e.getMessage()).setUnfinishedMessage(this); 17454 } finally { 
17455 this.unknownFields = unknownFields.build(); 17456 makeExtensionsImmutable(); 17457 } 17458 } 17459 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()17460 getDescriptor() { 17461 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor; 17462 } 17463 17464 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()17465 internalGetFieldAccessorTable() { 17466 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable 17467 .ensureFieldAccessorsInitialized( 17468 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class); 17469 } 17470 17471 public static com.google.protobuf.Parser<ScanRequest> PARSER = 17472 new com.google.protobuf.AbstractParser<ScanRequest>() { 17473 public ScanRequest parsePartialFrom( 17474 com.google.protobuf.CodedInputStream input, 17475 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 17476 throws com.google.protobuf.InvalidProtocolBufferException { 17477 return new ScanRequest(input, extensionRegistry); 17478 } 17479 }; 17480 17481 @java.lang.Override getParserForType()17482 public com.google.protobuf.Parser<ScanRequest> getParserForType() { 17483 return PARSER; 17484 } 17485 17486 private int bitField0_; 17487 // optional .RegionSpecifier region = 1; 17488 public static final int REGION_FIELD_NUMBER = 1; 17489 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; 17490 /** 17491 * <code>optional .RegionSpecifier region = 1;</code> 17492 */ hasRegion()17493 public boolean hasRegion() { 17494 return ((bitField0_ & 0x00000001) == 0x00000001); 17495 } 17496 /** 17497 * <code>optional .RegionSpecifier region = 1;</code> 17498 */ getRegion()17499 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 
17500 return region_; 17501 } 17502 /** 17503 * <code>optional .RegionSpecifier region = 1;</code> 17504 */ getRegionOrBuilder()17505 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 17506 return region_; 17507 } 17508 17509 // optional .Scan scan = 2; 17510 public static final int SCAN_FIELD_NUMBER = 2; 17511 private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; 17512 /** 17513 * <code>optional .Scan scan = 2;</code> 17514 */ hasScan()17515 public boolean hasScan() { 17516 return ((bitField0_ & 0x00000002) == 0x00000002); 17517 } 17518 /** 17519 * <code>optional .Scan scan = 2;</code> 17520 */ getScan()17521 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { 17522 return scan_; 17523 } 17524 /** 17525 * <code>optional .Scan scan = 2;</code> 17526 */ getScanOrBuilder()17527 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { 17528 return scan_; 17529 } 17530 17531 // optional uint64 scanner_id = 3; 17532 public static final int SCANNER_ID_FIELD_NUMBER = 3; 17533 private long scannerId_; 17534 /** 17535 * <code>optional uint64 scanner_id = 3;</code> 17536 */ hasScannerId()17537 public boolean hasScannerId() { 17538 return ((bitField0_ & 0x00000004) == 0x00000004); 17539 } 17540 /** 17541 * <code>optional uint64 scanner_id = 3;</code> 17542 */ getScannerId()17543 public long getScannerId() { 17544 return scannerId_; 17545 } 17546 17547 // optional uint32 number_of_rows = 4; 17548 public static final int NUMBER_OF_ROWS_FIELD_NUMBER = 4; 17549 private int numberOfRows_; 17550 /** 17551 * <code>optional uint32 number_of_rows = 4;</code> 17552 */ hasNumberOfRows()17553 public boolean hasNumberOfRows() { 17554 return ((bitField0_ & 0x00000008) == 0x00000008); 17555 } 17556 /** 17557 * <code>optional uint32 number_of_rows = 4;</code> 17558 */ getNumberOfRows()17559 public int getNumberOfRows() { 17560 return 
numberOfRows_; 17561 } 17562 17563 // optional bool close_scanner = 5; 17564 public static final int CLOSE_SCANNER_FIELD_NUMBER = 5; 17565 private boolean closeScanner_; 17566 /** 17567 * <code>optional bool close_scanner = 5;</code> 17568 */ hasCloseScanner()17569 public boolean hasCloseScanner() { 17570 return ((bitField0_ & 0x00000010) == 0x00000010); 17571 } 17572 /** 17573 * <code>optional bool close_scanner = 5;</code> 17574 */ getCloseScanner()17575 public boolean getCloseScanner() { 17576 return closeScanner_; 17577 } 17578 17579 // optional uint64 next_call_seq = 6; 17580 public static final int NEXT_CALL_SEQ_FIELD_NUMBER = 6; 17581 private long nextCallSeq_; 17582 /** 17583 * <code>optional uint64 next_call_seq = 6;</code> 17584 */ hasNextCallSeq()17585 public boolean hasNextCallSeq() { 17586 return ((bitField0_ & 0x00000020) == 0x00000020); 17587 } 17588 /** 17589 * <code>optional uint64 next_call_seq = 6;</code> 17590 */ getNextCallSeq()17591 public long getNextCallSeq() { 17592 return nextCallSeq_; 17593 } 17594 17595 // optional bool client_handles_partials = 7; 17596 public static final int CLIENT_HANDLES_PARTIALS_FIELD_NUMBER = 7; 17597 private boolean clientHandlesPartials_; 17598 /** 17599 * <code>optional bool client_handles_partials = 7;</code> 17600 */ hasClientHandlesPartials()17601 public boolean hasClientHandlesPartials() { 17602 return ((bitField0_ & 0x00000040) == 0x00000040); 17603 } 17604 /** 17605 * <code>optional bool client_handles_partials = 7;</code> 17606 */ getClientHandlesPartials()17607 public boolean getClientHandlesPartials() { 17608 return clientHandlesPartials_; 17609 } 17610 17611 // optional bool client_handles_heartbeats = 8; 17612 public static final int CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER = 8; 17613 private boolean clientHandlesHeartbeats_; 17614 /** 17615 * <code>optional bool client_handles_heartbeats = 8;</code> 17616 */ hasClientHandlesHeartbeats()17617 public boolean hasClientHandlesHeartbeats() { 17618 return 
((bitField0_ & 0x00000080) == 0x00000080); 17619 } 17620 /** 17621 * <code>optional bool client_handles_heartbeats = 8;</code> 17622 */ getClientHandlesHeartbeats()17623 public boolean getClientHandlesHeartbeats() { 17624 return clientHandlesHeartbeats_; 17625 } 17626 17627 // optional bool track_scan_metrics = 9; 17628 public static final int TRACK_SCAN_METRICS_FIELD_NUMBER = 9; 17629 private boolean trackScanMetrics_; 17630 /** 17631 * <code>optional bool track_scan_metrics = 9;</code> 17632 */ hasTrackScanMetrics()17633 public boolean hasTrackScanMetrics() { 17634 return ((bitField0_ & 0x00000100) == 0x00000100); 17635 } 17636 /** 17637 * <code>optional bool track_scan_metrics = 9;</code> 17638 */ getTrackScanMetrics()17639 public boolean getTrackScanMetrics() { 17640 return trackScanMetrics_; 17641 } 17642 17643 // optional bool renew = 10 [default = false]; 17644 public static final int RENEW_FIELD_NUMBER = 10; 17645 private boolean renew_; 17646 /** 17647 * <code>optional bool renew = 10 [default = false];</code> 17648 */ hasRenew()17649 public boolean hasRenew() { 17650 return ((bitField0_ & 0x00000200) == 0x00000200); 17651 } 17652 /** 17653 * <code>optional bool renew = 10 [default = false];</code> 17654 */ getRenew()17655 public boolean getRenew() { 17656 return renew_; 17657 } 17658 initFields()17659 private void initFields() { 17660 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 17661 scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); 17662 scannerId_ = 0L; 17663 numberOfRows_ = 0; 17664 closeScanner_ = false; 17665 nextCallSeq_ = 0L; 17666 clientHandlesPartials_ = false; 17667 clientHandlesHeartbeats_ = false; 17668 trackScanMetrics_ = false; 17669 renew_ = false; 17670 } 17671 private byte memoizedIsInitialized = -1; isInitialized()17672 public final boolean isInitialized() { 17673 byte isInitialized = memoizedIsInitialized; 17674 if (isInitialized != -1) 
return isInitialized == 1; 17675 17676 if (hasRegion()) { 17677 if (!getRegion().isInitialized()) { 17678 memoizedIsInitialized = 0; 17679 return false; 17680 } 17681 } 17682 if (hasScan()) { 17683 if (!getScan().isInitialized()) { 17684 memoizedIsInitialized = 0; 17685 return false; 17686 } 17687 } 17688 memoizedIsInitialized = 1; 17689 return true; 17690 } 17691 writeTo(com.google.protobuf.CodedOutputStream output)17692 public void writeTo(com.google.protobuf.CodedOutputStream output) 17693 throws java.io.IOException { 17694 getSerializedSize(); 17695 if (((bitField0_ & 0x00000001) == 0x00000001)) { 17696 output.writeMessage(1, region_); 17697 } 17698 if (((bitField0_ & 0x00000002) == 0x00000002)) { 17699 output.writeMessage(2, scan_); 17700 } 17701 if (((bitField0_ & 0x00000004) == 0x00000004)) { 17702 output.writeUInt64(3, scannerId_); 17703 } 17704 if (((bitField0_ & 0x00000008) == 0x00000008)) { 17705 output.writeUInt32(4, numberOfRows_); 17706 } 17707 if (((bitField0_ & 0x00000010) == 0x00000010)) { 17708 output.writeBool(5, closeScanner_); 17709 } 17710 if (((bitField0_ & 0x00000020) == 0x00000020)) { 17711 output.writeUInt64(6, nextCallSeq_); 17712 } 17713 if (((bitField0_ & 0x00000040) == 0x00000040)) { 17714 output.writeBool(7, clientHandlesPartials_); 17715 } 17716 if (((bitField0_ & 0x00000080) == 0x00000080)) { 17717 output.writeBool(8, clientHandlesHeartbeats_); 17718 } 17719 if (((bitField0_ & 0x00000100) == 0x00000100)) { 17720 output.writeBool(9, trackScanMetrics_); 17721 } 17722 if (((bitField0_ & 0x00000200) == 0x00000200)) { 17723 output.writeBool(10, renew_); 17724 } 17725 getUnknownFields().writeTo(output); 17726 } 17727 17728 private int memoizedSerializedSize = -1; getSerializedSize()17729 public int getSerializedSize() { 17730 int size = memoizedSerializedSize; 17731 if (size != -1) return size; 17732 17733 size = 0; 17734 if (((bitField0_ & 0x00000001) == 0x00000001)) { 17735 size += com.google.protobuf.CodedOutputStream 17736 
.computeMessageSize(1, region_); 17737 } 17738 if (((bitField0_ & 0x00000002) == 0x00000002)) { 17739 size += com.google.protobuf.CodedOutputStream 17740 .computeMessageSize(2, scan_); 17741 } 17742 if (((bitField0_ & 0x00000004) == 0x00000004)) { 17743 size += com.google.protobuf.CodedOutputStream 17744 .computeUInt64Size(3, scannerId_); 17745 } 17746 if (((bitField0_ & 0x00000008) == 0x00000008)) { 17747 size += com.google.protobuf.CodedOutputStream 17748 .computeUInt32Size(4, numberOfRows_); 17749 } 17750 if (((bitField0_ & 0x00000010) == 0x00000010)) { 17751 size += com.google.protobuf.CodedOutputStream 17752 .computeBoolSize(5, closeScanner_); 17753 } 17754 if (((bitField0_ & 0x00000020) == 0x00000020)) { 17755 size += com.google.protobuf.CodedOutputStream 17756 .computeUInt64Size(6, nextCallSeq_); 17757 } 17758 if (((bitField0_ & 0x00000040) == 0x00000040)) { 17759 size += com.google.protobuf.CodedOutputStream 17760 .computeBoolSize(7, clientHandlesPartials_); 17761 } 17762 if (((bitField0_ & 0x00000080) == 0x00000080)) { 17763 size += com.google.protobuf.CodedOutputStream 17764 .computeBoolSize(8, clientHandlesHeartbeats_); 17765 } 17766 if (((bitField0_ & 0x00000100) == 0x00000100)) { 17767 size += com.google.protobuf.CodedOutputStream 17768 .computeBoolSize(9, trackScanMetrics_); 17769 } 17770 if (((bitField0_ & 0x00000200) == 0x00000200)) { 17771 size += com.google.protobuf.CodedOutputStream 17772 .computeBoolSize(10, renew_); 17773 } 17774 size += getUnknownFields().getSerializedSize(); 17775 memoizedSerializedSize = size; 17776 return size; 17777 } 17778 17779 private static final long serialVersionUID = 0L; 17780 @java.lang.Override writeReplace()17781 protected java.lang.Object writeReplace() 17782 throws java.io.ObjectStreamException { 17783 return super.writeReplace(); 17784 } 17785 17786 @java.lang.Override equals(final java.lang.Object obj)17787 public boolean equals(final java.lang.Object obj) { 17788 if (obj == this) { 17789 return true; 17790 } 
17791 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)) { 17792 return super.equals(obj); 17793 } 17794 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) obj; 17795 17796 boolean result = true; 17797 result = result && (hasRegion() == other.hasRegion()); 17798 if (hasRegion()) { 17799 result = result && getRegion() 17800 .equals(other.getRegion()); 17801 } 17802 result = result && (hasScan() == other.hasScan()); 17803 if (hasScan()) { 17804 result = result && getScan() 17805 .equals(other.getScan()); 17806 } 17807 result = result && (hasScannerId() == other.hasScannerId()); 17808 if (hasScannerId()) { 17809 result = result && (getScannerId() 17810 == other.getScannerId()); 17811 } 17812 result = result && (hasNumberOfRows() == other.hasNumberOfRows()); 17813 if (hasNumberOfRows()) { 17814 result = result && (getNumberOfRows() 17815 == other.getNumberOfRows()); 17816 } 17817 result = result && (hasCloseScanner() == other.hasCloseScanner()); 17818 if (hasCloseScanner()) { 17819 result = result && (getCloseScanner() 17820 == other.getCloseScanner()); 17821 } 17822 result = result && (hasNextCallSeq() == other.hasNextCallSeq()); 17823 if (hasNextCallSeq()) { 17824 result = result && (getNextCallSeq() 17825 == other.getNextCallSeq()); 17826 } 17827 result = result && (hasClientHandlesPartials() == other.hasClientHandlesPartials()); 17828 if (hasClientHandlesPartials()) { 17829 result = result && (getClientHandlesPartials() 17830 == other.getClientHandlesPartials()); 17831 } 17832 result = result && (hasClientHandlesHeartbeats() == other.hasClientHandlesHeartbeats()); 17833 if (hasClientHandlesHeartbeats()) { 17834 result = result && (getClientHandlesHeartbeats() 17835 == other.getClientHandlesHeartbeats()); 17836 } 17837 result = result && (hasTrackScanMetrics() == other.hasTrackScanMetrics()); 17838 if (hasTrackScanMetrics()) { 17839 
        result = result && (getTrackScanMetrics()
            == other.getTrackScanMetrics());
      }
      result = result && (hasRenew() == other.hasRenew());
      if (hasRenew()) {
        result = result && (getRenew()
            == other.getRenew());
      }
      // Unknown (unrecognized-tag) fields are part of the value: two messages
      // differing only in unknown fields do not compare equal.
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached result of hashCode(); 0 means "not computed yet".  The
    // computation below is deterministic over immutable message state, so a
    // concurrent recompute stores the same value (standard protoc pattern).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // 19/37/53/29 are the fixed mixing constants emitted by the protobuf
      // compiler; each *present* optional field folds in its field number
      // followed by its value hash, so absent fields do not affect the hash.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (hasScan()) {
        hash = (37 * hash) + SCAN_FIELD_NUMBER;
        hash = (53 * hash) + getScan().hashCode();
      }
      if (hasScannerId()) {
        hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getScannerId());
      }
      if (hasNumberOfRows()) {
        hash = (37 * hash) + NUMBER_OF_ROWS_FIELD_NUMBER;
        hash = (53 * hash) + getNumberOfRows();
      }
      if (hasCloseScanner()) {
        hash = (37 * hash) + CLOSE_SCANNER_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getCloseScanner());
      }
      if (hasNextCallSeq()) {
        hash = (37 * hash) + NEXT_CALL_SEQ_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getNextCallSeq());
      }
      if (hasClientHandlesPartials()) {
        hash = (37 * hash) + CLIENT_HANDLES_PARTIALS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getClientHandlesPartials());
      }
      if (hasClientHandlesHeartbeats()) {
        hash = (37 * hash) + CLIENT_HANDLES_HEARTBEATS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getClientHandlesHeartbeats());
      }
      if (hasTrackScanMetrics()) {
        hash = (37 * hash) + TRACK_SCAN_METRICS_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getTrackScanMetrics());
      }
      if (hasRenew()) {
        hash = (37 * hash) + RENEW_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getRenew());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // ---- Static parse entry points: all delegate to the shared PARSER. ----

    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // ---- Builder factory methods (generated). ----

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with a copy of the given message.
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ScanRequest}
     *
     * <pre>
     **
     * A scan request. Initially, it should specify a scan. Later on, you
     * can use the scanner id returned to fetch result batches with a different
     * scan request.
     *
     * The scanner will remain open if there are more results, and it's not
     * asked to be closed explicitly.
     *
     * You can fetch the results and ask the scanner to be closed to save
     * a trip if you are not interested in remaining results.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-message field builders when the runtime asks
      // for it (alwaysUseFieldBuilders); otherwise they are created lazily.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
          getScanFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its proto default and clears its presence bit.
      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (scanBuilder_ == null) {
          scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
        } else {
          scanBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        scannerId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000004);
        numberOfRows_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        closeScanner_ = false;
        bitField0_ = (bitField0_ & ~0x00000010);
        nextCallSeq_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000020);
        clientHandlesPartials_ = false;
        bitField0_ = (bitField0_ & ~0x00000040);
        clientHandlesHeartbeats_ = false;
        bitField0_ = (bitField0_ & ~0x00000080);
        trackScanMetrics_ = false;
        bitField0_ = (bitField0_ & ~0x00000100);
        renew_ = false;
        bitField0_ = (bitField0_ & ~0x00000200);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects a message whose required
      // sub-messages are not fully initialized.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state into a new message without an initialization
      // check, translating the builder's presence bits into the message's.
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        if (scanBuilder_ == null) {
          result.scan_ = scan_;
        } else {
          result.scan_ = scanBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.scannerId_ = scannerId_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.numberOfRows_ = numberOfRows_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.closeScanner_ = closeScanner_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.nextCallSeq_ = nextCallSeq_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.clientHandlesPartials_ = clientHandlesPartials_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.clientHandlesHeartbeats_ = clientHandlesHeartbeats_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        result.trackScanMetrics_ = trackScanMetrics_;
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000200;
        }
        result.renew_ = renew_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge: only fields present in 'other' overwrite (or,
      // for message fields, merge into) this builder's state.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (other.hasScan()) {
          mergeScan(other.getScan());
        }
        if (other.hasScannerId()) {
          setScannerId(other.getScannerId());
        }
        if (other.hasNumberOfRows()) {
          setNumberOfRows(other.getNumberOfRows());
        }
        if (other.hasCloseScanner()) {
          setCloseScanner(other.getCloseScanner());
        }
        if (other.hasNextCallSeq()) {
          setNextCallSeq(other.getNextCallSeq());
        }
        if (other.hasClientHandlesPartials()) {
          setClientHandlesPartials(other.getClientHandlesPartials());
        }
        if (other.hasClientHandlesHeartbeats()) {
          setClientHandlesHeartbeats(other.getClientHandlesHeartbeats());
        }
        if (other.hasTrackScanMetrics()) {
          setTrackScanMetrics(other.getTrackScanMetrics());
        }
        if (other.hasRenew()) {
          setRenew(other.getRenew());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // All ScanRequest fields are optional; only present sub-messages must
      // themselves be initialized.
      public final boolean isInitialized() {
        if (hasRegion()) {
          if (!getRegion().isInitialized()) {
            
            return false;
          }
        }
        if (hasScan()) {
          if (!getScan().isInitialized()) {
            
            return false;
          }
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap: one bit per field, in declaration order
      // (0x1 = region ... 0x200 = renew).
      private int bitField0_;

      // optional .RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          // Merge only when a non-default value is already present;
          // otherwise adopt 'value' wholesale.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>optional .RegionSpecifier region = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          // Lazily switch from the plain field to a SingleFieldBuilder;
          // from then on regionBuilder_ owns the value (region_ is nulled).
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // optional .Scan scan = 2;
      private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public boolean hasScan() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
        if (scanBuilder_ == null) {
          return scan_;
        } else {
          return scanBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
        if (scanBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          scan_ = value;
          onChanged();
        } else {
          scanBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public Builder setScan(
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
        if (scanBuilder_ == null) {
          scan_ = builderForValue.build();
          onChanged();
        } else {
          scanBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
        if (scanBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002) &&
              scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
            scan_ =
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
          } else {
            scan_ = value;
          }
          onChanged();
        } else {
          scanBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000002;
        return this;
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public Builder clearScan() {
        if (scanBuilder_ == null) {
          scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
          onChanged();
        } else {
          scanBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getScanFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
        if (scanBuilder_ != null) {
          return scanBuilder_.getMessageOrBuilder();
        } else {
          return scan_;
        }
      }
      /**
       * <code>optional .Scan scan = 2;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> 
          getScanFieldBuilder() {
        if (scanBuilder_ == null) {
          scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
                  scan_,
                  getParentForChildren(),
                  isClean());
          scan_ = null;
        }
        return scanBuilder_;
      }

      // optional uint64 scanner_id = 3;
      private long scannerId_ ;
      /**
       * <code>optional uint64 scanner_id = 3;</code>
       */
      public boolean hasScannerId() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional uint64 scanner_id = 3;</code>
       */
      public long getScannerId() {
        return scannerId_;
      }
      /**
       * <code>optional uint64 scanner_id = 3;</code>
       */
      public Builder setScannerId(long value) {
        bitField0_ |= 0x00000004;
        scannerId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 scanner_id = 3;</code>
       */
      public Builder clearScannerId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        scannerId_ = 0L;
        onChanged();
        return this;
      }

      // optional uint32 number_of_rows = 4;
      private int numberOfRows_ ;
      /**
       * <code>optional uint32 number_of_rows = 4;</code>
       */
      public boolean hasNumberOfRows() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional uint32 number_of_rows = 4;</code>
       */
      public int getNumberOfRows() {
        return numberOfRows_;
      }
      /**
       * <code>optional uint32 number_of_rows = 4;</code>
       */
      public Builder setNumberOfRows(int value) {
        bitField0_ |= 0x00000008;
        numberOfRows_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 number_of_rows = 4;</code>
       */
      public Builder clearNumberOfRows() {
        bitField0_ = (bitField0_ & ~0x00000008);
        numberOfRows_ = 0;
        onChanged();
        return this;
      }

      // optional bool close_scanner = 5;
      private boolean closeScanner_ ;
      /**
       * <code>optional bool close_scanner = 5;</code>
       */
      public boolean hasCloseScanner() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional bool close_scanner = 5;</code>
       */
      public boolean getCloseScanner() {
        return closeScanner_;
      }
      /**
       * <code>optional bool close_scanner = 5;</code>
       */
      public Builder setCloseScanner(boolean value) {
        bitField0_ |= 0x00000010;
        closeScanner_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool close_scanner = 5;</code>
       */
      public Builder clearCloseScanner() {
        bitField0_ = (bitField0_ & ~0x00000010);
        closeScanner_ = false;
        onChanged();
        return this;
      }

      // optional uint64 next_call_seq = 6;
      private long nextCallSeq_ ;
      /**
       * <code>optional uint64 next_call_seq = 6;</code>
       */
      public boolean hasNextCallSeq() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional uint64 next_call_seq = 6;</code>
       */
      public long getNextCallSeq() {
        return nextCallSeq_;
      }
      /**
       * <code>optional uint64 next_call_seq = 6;</code>
       */
      public Builder setNextCallSeq(long value) {
        bitField0_ |= 0x00000020;
        nextCallSeq_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint64 next_call_seq = 6;</code>
       */
      public Builder clearNextCallSeq() {
        bitField0_ = (bitField0_ & ~0x00000020);
        nextCallSeq_ = 0L;
        onChanged();
        return this;
      }

      // optional bool client_handles_partials = 7;
      private boolean clientHandlesPartials_ ;
      /**
       * <code>optional bool client_handles_partials = 7;</code>
       */
      public boolean hasClientHandlesPartials() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional bool client_handles_partials = 7;</code>
       */
      public boolean getClientHandlesPartials() {
        return clientHandlesPartials_;
      }
      /**
       * <code>optional bool client_handles_partials = 7;</code>
       */
      public Builder setClientHandlesPartials(boolean value) {
        bitField0_ |= 0x00000040;
        clientHandlesPartials_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool client_handles_partials = 7;</code>
       */
      public Builder clearClientHandlesPartials() {
        bitField0_ = (bitField0_ & ~0x00000040);
        clientHandlesPartials_ = false;
        onChanged();
        return this;
      }

      // optional bool client_handles_heartbeats = 8;
      private boolean clientHandlesHeartbeats_ ;
      /**
       * <code>optional bool client_handles_heartbeats = 8;</code>
       */
      public boolean hasClientHandlesHeartbeats() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional bool client_handles_heartbeats = 8;</code>
       */
      public boolean getClientHandlesHeartbeats() {
        return clientHandlesHeartbeats_;
      }
      /**
       * <code>optional bool client_handles_heartbeats = 8;</code>
       */
      public Builder setClientHandlesHeartbeats(boolean value) {
        bitField0_ |= 0x00000080;
        clientHandlesHeartbeats_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool client_handles_heartbeats = 8;</code>
       */
      public Builder clearClientHandlesHeartbeats() {
        bitField0_ = (bitField0_ & ~0x00000080);
        clientHandlesHeartbeats_ = false;
        onChanged();
        return this;
      }

      // optional bool track_scan_metrics = 9;
      private boolean trackScanMetrics_ ;
      /**
       * <code>optional bool track_scan_metrics = 9;</code>
       */
      public boolean hasTrackScanMetrics() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional bool track_scan_metrics = 9;</code>
       */
      public boolean getTrackScanMetrics() {
        return trackScanMetrics_;
      }
      /**
       * <code>optional bool track_scan_metrics = 9;</code>
       */
      public Builder setTrackScanMetrics(boolean value) {
        bitField0_ |= 0x00000100;
        trackScanMetrics_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool track_scan_metrics = 9;</code>
       */
      public Builder clearTrackScanMetrics() {
        bitField0_ = (bitField0_ & ~0x00000100);
        trackScanMetrics_ = false;
        onChanged();
        return this;
      }

      // optional bool renew = 10 [default = false];
      private boolean renew_ ;
      /**
       * <code>optional bool renew = 10 [default = false];</code>
       */
      public boolean hasRenew() {
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      /**
       * <code>optional bool renew = 10 [default = false];</code>
       */
      public boolean getRenew() {
        return renew_;
      }
      /**
       * <code>optional bool renew = 10 [default = false];</code>
       */
      public Builder setRenew(boolean value) {
        bitField0_ |= 0x00000200;
        renew_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool renew = 10 [default = false];</code>
       */
      public Builder clearRenew() {
        bitField0_ = (bitField0_ & ~0x00000200);
        renew_ = false;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ScanRequest)
    }

    // Shared singleton default instance, created and initialized once at
    // class-load time.
    static {
      defaultInstance = new ScanRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ScanRequest)
  }

  public interface ScanResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated uint32 cells_per_result = 1;
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. A cellblock is made up
     * of all Cells serialized out as one cellblock BUT responses from a server
     * have their Cells grouped by Result. So we can reconstitute the
     * Results on the client-side, this field is a list of counts of Cells
     * in each Result that makes up the response. For example, if this field
     * has 3, 3, 3 in it, then we know that on the client, we are to make
     * three Results each of three Cells each.
     * </pre>
     */
    java.util.List<java.lang.Integer> getCellsPerResultList();
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. A cellblock is made up
     * of all Cells serialized out as one cellblock BUT responses from a server
     * have their Cells grouped by Result. So we can reconstitute the
     * Results on the client-side, this field is a list of counts of Cells
     * in each Result that makes up the response. For example, if this field
     * has 3, 3, 3 in it, then we know that on the client, we are to make
     * three Results each of three Cells each.
     * </pre>
     */
    int getCellsPerResultCount();
    /**
     * <code>repeated uint32 cells_per_result = 1;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. A cellblock is made up
     * of all Cells serialized out as one cellblock BUT responses from a server
     * have their Cells grouped by Result. So we can reconstitute the
     * Results on the client-side, this field is a list of counts of Cells
     * in each Result that makes up the response. For example, if this field
     * has 3, 3, 3 in it, then we know that on the client, we are to make
     * three Results each of three Cells each.
     * </pre>
     */
    int getCellsPerResult(int index);

    // optional uint64 scanner_id = 2;
    /**
     * <code>optional uint64 scanner_id = 2;</code>
     */
    boolean hasScannerId();
    /**
     * <code>optional uint64 scanner_id = 2;</code>
     */
    long getScannerId();

    // optional bool more_results = 3;
    /**
     * <code>optional bool more_results = 3;</code>
     */
    boolean hasMoreResults();
    /**
     * <code>optional bool more_results = 3;</code>
     */
    boolean getMoreResults();

    // optional uint32 ttl = 4;
    /**
     * <code>optional uint32 ttl = 4;</code>
     */
    boolean hasTtl();
    /**
     * <code>optional uint32 ttl = 4;</code>
     */
    int getTtl();

    // repeated .Result results = 5;
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> 
        getResultsList();
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index);
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    int getResultsCount();
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> 
        getResultsOrBuilderList();
    /**
     * <code>repeated .Result results = 5;</code>
     *
     * <pre>
     * If cells are not carried in an accompanying cellblock, then they are pb'd here.
     * This field is mutually exclusive with cells_per_result (since the Cells will
     * be inside the pb'd Result)
     * </pre>
     */
    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder(
        int index);

    // optional bool stale = 6;
    /**
     * <code>optional bool stale = 6;</code>
     */
    boolean hasStale();
    /**
     * <code>optional bool stale = 6;</code>
     */
    boolean getStale();

    // repeated bool partial_flag_per_result = 7;
    /**
     * <code>repeated bool partial_flag_per_result = 7;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. In the event that a row
     * could not fit all of its cells into a single RPC chunk, the results will be
     * returned as partials, and reconstructed into a complete result on the client
     * side. This field is a list of flags indicating whether or not the result
     * that the cells belong to is a partial result. For example, if this field
     * has false, false, true in it, then we know that on the client side, we need to
     * make another RPC request since the last result was only a partial.
     * </pre>
     */
    java.util.List<java.lang.Boolean> getPartialFlagPerResultList();
    /**
     * <code>repeated bool partial_flag_per_result = 7;</code>
     *
     * <pre>
     * This field is filled in if we are doing cellblocks. In the event that a row
     * could not fit all of its cells into a single RPC chunk, the results will be
     * returned as partials, and reconstructed into a complete result on the client
     * side. This field is a list of flags indicating whether or not the result
     * that the cells belong to is a partial result.
For example, if this field 18888 * has false, false, true in it, then we know that on the client side, we need to 18889 * make another RPC request since the last result was only a partial. 18890 * </pre> 18891 */ getPartialFlagPerResultCount()18892 int getPartialFlagPerResultCount(); 18893 /** 18894 * <code>repeated bool partial_flag_per_result = 7;</code> 18895 * 18896 * <pre> 18897 * This field is filled in if we are doing cellblocks. In the event that a row 18898 * could not fit all of its cells into a single RPC chunk, the results will be 18899 * returned as partials, and reconstructed into a complete result on the client 18900 * side. This field is a list of flags indicating whether or not the result 18901 * that the cells belong to is a partial result. For example, if this field 18902 * has false, false, true in it, then we know that on the client side, we need to 18903 * make another RPC request since the last result was only a partial. 18904 * </pre> 18905 */ getPartialFlagPerResult(int index)18906 boolean getPartialFlagPerResult(int index); 18907 18908 // optional bool more_results_in_region = 8; 18909 /** 18910 * <code>optional bool more_results_in_region = 8;</code> 18911 * 18912 * <pre> 18913 * A server may choose to limit the number of results returned to the client for 18914 * reasons such as the size in bytes or quantity of results accumulated. This field 18915 * will be true when more results exist in the current region. 18916 * </pre> 18917 */ hasMoreResultsInRegion()18918 boolean hasMoreResultsInRegion(); 18919 /** 18920 * <code>optional bool more_results_in_region = 8;</code> 18921 * 18922 * <pre> 18923 * A server may choose to limit the number of results returned to the client for 18924 * reasons such as the size in bytes or quantity of results accumulated. This field 18925 * will be true when more results exist in the current region.
18926 * </pre> 18927 */ getMoreResultsInRegion()18928 boolean getMoreResultsInRegion(); 18929 18930 // optional bool heartbeat_message = 9; 18931 /** 18932 * <code>optional bool heartbeat_message = 9;</code> 18933 * 18934 * <pre> 18935 * This field is filled in if the server is sending back a heartbeat message. 18936 * Heartbeat messages are sent back to the client to prevent the scanner from 18937 * timing out. Seeing a heartbeat message communicates to the Client that the 18938 * server would have continued to scan had the time limit not been reached. 18939 * </pre> 18940 */ hasHeartbeatMessage()18941 boolean hasHeartbeatMessage(); 18942 /** 18943 * <code>optional bool heartbeat_message = 9;</code> 18944 * 18945 * <pre> 18946 * This field is filled in if the server is sending back a heartbeat message. 18947 * Heartbeat messages are sent back to the client to prevent the scanner from 18948 * timing out. Seeing a heartbeat message communicates to the Client that the 18949 * server would have continued to scan had the time limit not been reached. 18950 * </pre> 18951 */ getHeartbeatMessage()18952 boolean getHeartbeatMessage(); 18953 18954 // optional .ScanMetrics scan_metrics = 10; 18955 /** 18956 * <code>optional .ScanMetrics scan_metrics = 10;</code> 18957 * 18958 * <pre> 18959 * This field is filled in if the client has requested that scan metrics be tracked. 18960 * The metrics tracked here are sent back to the client to be tracked together with 18961 * the existing client side metrics. 18962 * </pre> 18963 */ hasScanMetrics()18964 boolean hasScanMetrics(); 18965 /** 18966 * <code>optional .ScanMetrics scan_metrics = 10;</code> 18967 * 18968 * <pre> 18969 * This field is filled in if the client has requested that scan metrics be tracked. 18970 * The metrics tracked here are sent back to the client to be tracked together with 18971 * the existing client side metrics. 
18972 * </pre> 18973 */ getScanMetrics()18974 org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics(); 18975 /** 18976 * <code>optional .ScanMetrics scan_metrics = 10;</code> 18977 * 18978 * <pre> 18979 * This field is filled in if the client has requested that scan metrics be tracked. 18980 * The metrics tracked here are sent back to the client to be tracked together with 18981 * the existing client side metrics. 18982 * </pre> 18983 */ getScanMetricsOrBuilder()18984 org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder(); 18985 } 18986 /** 18987 * Protobuf type {@code ScanResponse} 18988 * 18989 * <pre> 18990 ** 18991 * The scan response. If there are no more results, more_results will 18992 * be false. If it is not specified, it means there are more. 18993 * </pre> 18994 */ 18995 public static final class ScanResponse extends 18996 com.google.protobuf.GeneratedMessage 18997 implements ScanResponseOrBuilder { 18998 // Use ScanResponse.newBuilder() to construct. 
ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)18999 private ScanResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 19000 super(builder); 19001 this.unknownFields = builder.getUnknownFields(); 19002 } ScanResponse(boolean noInit)19003 private ScanResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 19004 19005 private static final ScanResponse defaultInstance; getDefaultInstance()19006 public static ScanResponse getDefaultInstance() { 19007 return defaultInstance; 19008 } 19009 getDefaultInstanceForType()19010 public ScanResponse getDefaultInstanceForType() { 19011 return defaultInstance; 19012 } 19013 19014 private final com.google.protobuf.UnknownFieldSet unknownFields; 19015 @java.lang.Override 19016 public final com.google.protobuf.UnknownFieldSet getUnknownFields()19017 getUnknownFields() { 19018 return this.unknownFields; 19019 } ScanResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19020 private ScanResponse( 19021 com.google.protobuf.CodedInputStream input, 19022 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 19023 throws com.google.protobuf.InvalidProtocolBufferException { 19024 initFields(); 19025 int mutable_bitField0_ = 0; 19026 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 19027 com.google.protobuf.UnknownFieldSet.newBuilder(); 19028 try { 19029 boolean done = false; 19030 while (!done) { 19031 int tag = input.readTag(); 19032 switch (tag) { 19033 case 0: 19034 done = true; 19035 break; 19036 default: { 19037 if (!parseUnknownField(input, unknownFields, 19038 extensionRegistry, tag)) { 19039 done = true; 19040 } 19041 break; 19042 } 19043 case 8: { 19044 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 19045 cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(); 19046 mutable_bitField0_ |= 0x00000001; 19047 } 19048 
cellsPerResult_.add(input.readUInt32()); 19049 break; 19050 } 19051 case 10: { 19052 int length = input.readRawVarint32(); 19053 int limit = input.pushLimit(length); 19054 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001) && input.getBytesUntilLimit() > 0) { 19055 cellsPerResult_ = new java.util.ArrayList<java.lang.Integer>(); 19056 mutable_bitField0_ |= 0x00000001; 19057 } 19058 while (input.getBytesUntilLimit() > 0) { 19059 cellsPerResult_.add(input.readUInt32()); 19060 } 19061 input.popLimit(limit); 19062 break; 19063 } 19064 case 16: { 19065 bitField0_ |= 0x00000001; 19066 scannerId_ = input.readUInt64(); 19067 break; 19068 } 19069 case 24: { 19070 bitField0_ |= 0x00000002; 19071 moreResults_ = input.readBool(); 19072 break; 19073 } 19074 case 32: { 19075 bitField0_ |= 0x00000004; 19076 ttl_ = input.readUInt32(); 19077 break; 19078 } 19079 case 42: { 19080 if (!((mutable_bitField0_ & 0x00000010) == 0x00000010)) { 19081 results_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result>(); 19082 mutable_bitField0_ |= 0x00000010; 19083 } 19084 results_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result.PARSER, extensionRegistry)); 19085 break; 19086 } 19087 case 48: { 19088 bitField0_ |= 0x00000008; 19089 stale_ = input.readBool(); 19090 break; 19091 } 19092 case 56: { 19093 if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { 19094 partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(); 19095 mutable_bitField0_ |= 0x00000040; 19096 } 19097 partialFlagPerResult_.add(input.readBool()); 19098 break; 19099 } 19100 case 58: { 19101 int length = input.readRawVarint32(); 19102 int limit = input.pushLimit(length); 19103 if (!((mutable_bitField0_ & 0x00000040) == 0x00000040) && input.getBytesUntilLimit() > 0) { 19104 partialFlagPerResult_ = new java.util.ArrayList<java.lang.Boolean>(); 19105 mutable_bitField0_ |= 0x00000040; 19106 } 19107 while (input.getBytesUntilLimit() > 0) { 
19108 partialFlagPerResult_.add(input.readBool()); 19109 } 19110 input.popLimit(limit); 19111 break; 19112 } 19113 case 64: { 19114 bitField0_ |= 0x00000010; 19115 moreResultsInRegion_ = input.readBool(); 19116 break; 19117 } 19118 case 72: { 19119 bitField0_ |= 0x00000020; 19120 heartbeatMessage_ = input.readBool(); 19121 break; 19122 } 19123 case 82: { 19124 org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder subBuilder = null; 19125 if (((bitField0_ & 0x00000040) == 0x00000040)) { 19126 subBuilder = scanMetrics_.toBuilder(); 19127 } 19128 scanMetrics_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.PARSER, extensionRegistry); 19129 if (subBuilder != null) { 19130 subBuilder.mergeFrom(scanMetrics_); 19131 scanMetrics_ = subBuilder.buildPartial(); 19132 } 19133 bitField0_ |= 0x00000040; 19134 break; 19135 } 19136 } 19137 } 19138 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 19139 throw e.setUnfinishedMessage(this); 19140 } catch (java.io.IOException e) { 19141 throw new com.google.protobuf.InvalidProtocolBufferException( 19142 e.getMessage()).setUnfinishedMessage(this); 19143 } finally { 19144 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 19145 cellsPerResult_ = java.util.Collections.unmodifiableList(cellsPerResult_); 19146 } 19147 if (((mutable_bitField0_ & 0x00000010) == 0x00000010)) { 19148 results_ = java.util.Collections.unmodifiableList(results_); 19149 } 19150 if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { 19151 partialFlagPerResult_ = java.util.Collections.unmodifiableList(partialFlagPerResult_); 19152 } 19153 this.unknownFields = unknownFields.build(); 19154 makeExtensionsImmutable(); 19155 } 19156 } 19157 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()19158 getDescriptor() { 19159 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_descriptor; 19160 } 19161 19162 protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()19163 internalGetFieldAccessorTable() { 19164 return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ScanResponse_fieldAccessorTable 19165 .ensureFieldAccessorsInitialized( 19166 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.class, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse.Builder.class); 19167 } 19168 19169 public static com.google.protobuf.Parser<ScanResponse> PARSER = 19170 new com.google.protobuf.AbstractParser<ScanResponse>() { 19171 public ScanResponse parsePartialFrom( 19172 com.google.protobuf.CodedInputStream input, 19173 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 19174 throws com.google.protobuf.InvalidProtocolBufferException { 19175 return new ScanResponse(input, extensionRegistry); 19176 } 19177 }; 19178 19179 @java.lang.Override getParserForType()19180 public com.google.protobuf.Parser<ScanResponse> getParserForType() { 19181 return PARSER; 19182 } 19183 19184 private int bitField0_; 19185 // repeated uint32 cells_per_result = 1; 19186 public static final int CELLS_PER_RESULT_FIELD_NUMBER = 1; 19187 private java.util.List<java.lang.Integer> cellsPerResult_; 19188 /** 19189 * <code>repeated uint32 cells_per_result = 1;</code> 19190 * 19191 * <pre> 19192 * This field is filled in if we are doing cellblocks. A cellblock is made up 19193 * of all Cells serialized out as one cellblock BUT responses from a server 19194 * have their Cells grouped by Result. So we can reconstitute the 19195 * Results on the client-side, this field is a list of counts of Cells 19196 * in each Result that makes up the response. For example, if this field 19197 * has 3, 3, 3 in it, then we know that on the client, we are to make 19198 * three Results each of three Cells each. 
19199 * </pre> 19200 */ 19201 public java.util.List<java.lang.Integer> getCellsPerResultList()19202 getCellsPerResultList() { 19203 return cellsPerResult_; 19204 } 19205 /** 19206 * <code>repeated uint32 cells_per_result = 1;</code> 19207 * 19208 * <pre> 19209 * This field is filled in if we are doing cellblocks. A cellblock is made up 19210 * of all Cells serialized out as one cellblock BUT responses from a server 19211 * have their Cells grouped by Result. So we can reconstitute the 19212 * Results on the client-side, this field is a list of counts of Cells 19213 * in each Result that makes up the response. For example, if this field 19214 * has 3, 3, 3 in it, then we know that on the client, we are to make 19215 * three Results each of three Cells each. 19216 * </pre> 19217 */ getCellsPerResultCount()19218 public int getCellsPerResultCount() { 19219 return cellsPerResult_.size(); 19220 } 19221 /** 19222 * <code>repeated uint32 cells_per_result = 1;</code> 19223 * 19224 * <pre> 19225 * This field is filled in if we are doing cellblocks. A cellblock is made up 19226 * of all Cells serialized out as one cellblock BUT responses from a server 19227 * have their Cells grouped by Result. So we can reconstitute the 19228 * Results on the client-side, this field is a list of counts of Cells 19229 * in each Result that makes up the response. For example, if this field 19230 * has 3, 3, 3 in it, then we know that on the client, we are to make 19231 * three Results each of three Cells each. 
19232 * </pre> 19233 */ getCellsPerResult(int index)19234 public int getCellsPerResult(int index) { 19235 return cellsPerResult_.get(index); 19236 } 19237 19238 // optional uint64 scanner_id = 2; 19239 public static final int SCANNER_ID_FIELD_NUMBER = 2; 19240 private long scannerId_; 19241 /** 19242 * <code>optional uint64 scanner_id = 2;</code> 19243 */ hasScannerId()19244 public boolean hasScannerId() { 19245 return ((bitField0_ & 0x00000001) == 0x00000001); 19246 } 19247 /** 19248 * <code>optional uint64 scanner_id = 2;</code> 19249 */ getScannerId()19250 public long getScannerId() { 19251 return scannerId_; 19252 } 19253 19254 // optional bool more_results = 3; 19255 public static final int MORE_RESULTS_FIELD_NUMBER = 3; 19256 private boolean moreResults_; 19257 /** 19258 * <code>optional bool more_results = 3;</code> 19259 */ hasMoreResults()19260 public boolean hasMoreResults() { 19261 return ((bitField0_ & 0x00000002) == 0x00000002); 19262 } 19263 /** 19264 * <code>optional bool more_results = 3;</code> 19265 */ getMoreResults()19266 public boolean getMoreResults() { 19267 return moreResults_; 19268 } 19269 19270 // optional uint32 ttl = 4; 19271 public static final int TTL_FIELD_NUMBER = 4; 19272 private int ttl_; 19273 /** 19274 * <code>optional uint32 ttl = 4;</code> 19275 */ hasTtl()19276 public boolean hasTtl() { 19277 return ((bitField0_ & 0x00000004) == 0x00000004); 19278 } 19279 /** 19280 * <code>optional uint32 ttl = 4;</code> 19281 */ getTtl()19282 public int getTtl() { 19283 return ttl_; 19284 } 19285 19286 // repeated .Result results = 5; 19287 public static final int RESULTS_FIELD_NUMBER = 5; 19288 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> results_; 19289 /** 19290 * <code>repeated .Result results = 5;</code> 19291 * 19292 * <pre> 19293 * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
19294 * This field is mutually exclusive with cells_per_result (since the Cells will 19295 * be inside the pb'd Result) 19296 * </pre> 19297 */ getResultsList()19298 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result> getResultsList() { 19299 return results_; 19300 } 19301 /** 19302 * <code>repeated .Result results = 5;</code> 19303 * 19304 * <pre> 19305 * If cells are not carried in an accompanying cellblock, then they are pb'd here. 19306 * This field is mutually exclusive with cells_per_result (since the Cells will 19307 * be inside the pb'd Result) 19308 * </pre> 19309 */ 19310 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder> getResultsOrBuilderList()19311 getResultsOrBuilderList() { 19312 return results_; 19313 } 19314 /** 19315 * <code>repeated .Result results = 5;</code> 19316 * 19317 * <pre> 19318 * If cells are not carried in an accompanying cellblock, then they are pb'd here. 19319 * This field is mutually exclusive with cells_per_result (since the Cells will 19320 * be inside the pb'd Result) 19321 * </pre> 19322 */ getResultsCount()19323 public int getResultsCount() { 19324 return results_.size(); 19325 } 19326 /** 19327 * <code>repeated .Result results = 5;</code> 19328 * 19329 * <pre> 19330 * If cells are not carried in an accompanying cellblock, then they are pb'd here. 19331 * This field is mutually exclusive with cells_per_result (since the Cells will 19332 * be inside the pb'd Result) 19333 * </pre> 19334 */ getResults(int index)19335 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Result getResults(int index) { 19336 return results_.get(index); 19337 } 19338 /** 19339 * <code>repeated .Result results = 5;</code> 19340 * 19341 * <pre> 19342 * If cells are not carried in an accompanying cellblock, then they are pb'd here. 
19343 * This field is mutually exclusive with cells_per_result (since the Cells will 19344 * be inside the pb'd Result) 19345 * </pre> 19346 */ getResultsOrBuilder( int index)19347 public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ResultOrBuilder getResultsOrBuilder( 19348 int index) { 19349 return results_.get(index); 19350 } 19351 19352 // optional bool stale = 6; 19353 public static final int STALE_FIELD_NUMBER = 6; 19354 private boolean stale_; 19355 /** 19356 * <code>optional bool stale = 6;</code> 19357 */ hasStale()19358 public boolean hasStale() { 19359 return ((bitField0_ & 0x00000008) == 0x00000008); 19360 } 19361 /** 19362 * <code>optional bool stale = 6;</code> 19363 */ getStale()19364 public boolean getStale() { 19365 return stale_; 19366 } 19367 19368 // repeated bool partial_flag_per_result = 7; 19369 public static final int PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER = 7; 19370 private java.util.List<java.lang.Boolean> partialFlagPerResult_; 19371 /** 19372 * <code>repeated bool partial_flag_per_result = 7;</code> 19373 * 19374 * <pre> 19375 * This field is filled in if we are doing cellblocks. In the event that a row 19376 * could not fit all of its cells into a single RPC chunk, the results will be 19377 * returned as partials, and reconstructed into a complete result on the client 19378 * side. This field is a list of flags indicating whether or not the result 19379 * that the cells belong to is a partial result. For example, if this field 19380 * has false, false, true in it, then we know that on the client side, we need to 19381 * make another RPC request since the last result was only a partial. 19382 * </pre> 19383 */ 19384 public java.util.List<java.lang.Boolean> getPartialFlagPerResultList()19385 getPartialFlagPerResultList() { 19386 return partialFlagPerResult_; 19387 } 19388 /** 19389 * <code>repeated bool partial_flag_per_result = 7;</code> 19390 * 19391 * <pre> 19392 * This field is filled in if we are doing cellblocks. 
In the event that a row 19393 * could not fit all of its cells into a single RPC chunk, the results will be 19394 * returned as partials, and reconstructed into a complete result on the client 19395 * side. This field is a list of flags indicating whether or not the result 19396 * that the cells belong to is a partial result. For example, if this field 19397 * has false, false, true in it, then we know that on the client side, we need to 19398 * make another RPC request since the last result was only a partial. 19399 * </pre> 19400 */ getPartialFlagPerResultCount()19401 public int getPartialFlagPerResultCount() { 19402 return partialFlagPerResult_.size(); 19403 } 19404 /** 19405 * <code>repeated bool partial_flag_per_result = 7;</code> 19406 * 19407 * <pre> 19408 * This field is filled in if we are doing cellblocks. In the event that a row 19409 * could not fit all of its cells into a single RPC chunk, the results will be 19410 * returned as partials, and reconstructed into a complete result on the client 19411 * side. This field is a list of flags indicating whether or not the result 19412 * that the cells belong to is a partial result. For example, if this field 19413 * has false, false, true in it, then we know that on the client side, we need to 19414 * make another RPC request since the last result was only a partial. 19415 * </pre> 19416 */ getPartialFlagPerResult(int index)19417 public boolean getPartialFlagPerResult(int index) { 19418 return partialFlagPerResult_.get(index); 19419 } 19420 19421 // optional bool more_results_in_region = 8; 19422 public static final int MORE_RESULTS_IN_REGION_FIELD_NUMBER = 8; 19423 private boolean moreResultsInRegion_; 19424 /** 19425 * <code>optional bool more_results_in_region = 8;</code> 19426 * 19427 * <pre> 19428 * A server may choose to limit the number of results returned to the client for 19429 * reasons such as the size in bytes or quantity of results accumulated. 
This field 19430 * will be true when more results exist in the current region. 19431 * </pre> 19432 */ hasMoreResultsInRegion()19433 public boolean hasMoreResultsInRegion() { 19434 return ((bitField0_ & 0x00000010) == 0x00000010); 19435 } 19436 /** 19437 * <code>optional bool more_results_in_region = 8;</code> 19438 * 19439 * <pre> 19440 * A server may choose to limit the number of results returned to the client for 19441 * reasons such as the size in bytes or quantity of results accumulated. This field 19442 * will be true when more results exist in the current region. 19443 * </pre> 19444 */ getMoreResultsInRegion()19445 public boolean getMoreResultsInRegion() { 19446 return moreResultsInRegion_; 19447 } 19448 19449 // optional bool heartbeat_message = 9; 19450 public static final int HEARTBEAT_MESSAGE_FIELD_NUMBER = 9; 19451 private boolean heartbeatMessage_; 19452 /** 19453 * <code>optional bool heartbeat_message = 9;</code> 19454 * 19455 * <pre> 19456 * This field is filled in if the server is sending back a heartbeat message. 19457 * Heartbeat messages are sent back to the client to prevent the scanner from 19458 * timing out. Seeing a heartbeat message communicates to the Client that the 19459 * server would have continued to scan had the time limit not been reached. 19460 * </pre> 19461 */ hasHeartbeatMessage()19462 public boolean hasHeartbeatMessage() { 19463 return ((bitField0_ & 0x00000020) == 0x00000020); 19464 } 19465 /** 19466 * <code>optional bool heartbeat_message = 9;</code> 19467 * 19468 * <pre> 19469 * This field is filled in if the server is sending back a heartbeat message. 19470 * Heartbeat messages are sent back to the client to prevent the scanner from 19471 * timing out. Seeing a heartbeat message communicates to the Client that the 19472 * server would have continued to scan had the time limit not been reached.
19473 * </pre> 19474 */ getHeartbeatMessage()19475 public boolean getHeartbeatMessage() { 19476 return heartbeatMessage_; 19477 } 19478 19479 // optional .ScanMetrics scan_metrics = 10; 19480 public static final int SCAN_METRICS_FIELD_NUMBER = 10; 19481 private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics scanMetrics_; 19482 /** 19483 * <code>optional .ScanMetrics scan_metrics = 10;</code> 19484 * 19485 * <pre> 19486 * This field is filled in if the client has requested that scan metrics be tracked. 19487 * The metrics tracked here are sent back to the client to be tracked together with 19488 * the existing client side metrics. 19489 * </pre> 19490 */ hasScanMetrics()19491 public boolean hasScanMetrics() { 19492 return ((bitField0_ & 0x00000040) == 0x00000040); 19493 } 19494 /** 19495 * <code>optional .ScanMetrics scan_metrics = 10;</code> 19496 * 19497 * <pre> 19498 * This field is filled in if the client has requested that scan metrics be tracked. 19499 * The metrics tracked here are sent back to the client to be tracked together with 19500 * the existing client side metrics. 19501 * </pre> 19502 */ getScanMetrics()19503 public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getScanMetrics() { 19504 return scanMetrics_; 19505 } 19506 /** 19507 * <code>optional .ScanMetrics scan_metrics = 10;</code> 19508 * 19509 * <pre> 19510 * This field is filled in if the client has requested that scan metrics be tracked. 19511 * The metrics tracked here are sent back to the client to be tracked together with 19512 * the existing client side metrics. 
19513 * </pre> 19514 */ getScanMetricsOrBuilder()19515 public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder getScanMetricsOrBuilder() { 19516 return scanMetrics_; 19517 } 19518 initFields()19519 private void initFields() { 19520 cellsPerResult_ = java.util.Collections.emptyList(); 19521 scannerId_ = 0L; 19522 moreResults_ = false; 19523 ttl_ = 0; 19524 results_ = java.util.Collections.emptyList(); 19525 stale_ = false; 19526 partialFlagPerResult_ = java.util.Collections.emptyList(); 19527 moreResultsInRegion_ = false; 19528 heartbeatMessage_ = false; 19529 scanMetrics_ = org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance(); 19530 } 19531 private byte memoizedIsInitialized = -1; isInitialized()19532 public final boolean isInitialized() { 19533 byte isInitialized = memoizedIsInitialized; 19534 if (isInitialized != -1) return isInitialized == 1; 19535 19536 memoizedIsInitialized = 1; 19537 return true; 19538 } 19539 writeTo(com.google.protobuf.CodedOutputStream output)19540 public void writeTo(com.google.protobuf.CodedOutputStream output) 19541 throws java.io.IOException { 19542 getSerializedSize(); 19543 for (int i = 0; i < cellsPerResult_.size(); i++) { 19544 output.writeUInt32(1, cellsPerResult_.get(i)); 19545 } 19546 if (((bitField0_ & 0x00000001) == 0x00000001)) { 19547 output.writeUInt64(2, scannerId_); 19548 } 19549 if (((bitField0_ & 0x00000002) == 0x00000002)) { 19550 output.writeBool(3, moreResults_); 19551 } 19552 if (((bitField0_ & 0x00000004) == 0x00000004)) { 19553 output.writeUInt32(4, ttl_); 19554 } 19555 for (int i = 0; i < results_.size(); i++) { 19556 output.writeMessage(5, results_.get(i)); 19557 } 19558 if (((bitField0_ & 0x00000008) == 0x00000008)) { 19559 output.writeBool(6, stale_); 19560 } 19561 for (int i = 0; i < partialFlagPerResult_.size(); i++) { 19562 output.writeBool(7, partialFlagPerResult_.get(i)); 19563 } 19564 if (((bitField0_ & 0x00000010) == 0x00000010)) { 
19565 output.writeBool(8, moreResultsInRegion_); 19566 } 19567 if (((bitField0_ & 0x00000020) == 0x00000020)) { 19568 output.writeBool(9, heartbeatMessage_); 19569 } 19570 if (((bitField0_ & 0x00000040) == 0x00000040)) { 19571 output.writeMessage(10, scanMetrics_); 19572 } 19573 getUnknownFields().writeTo(output); 19574 } 19575 19576 private int memoizedSerializedSize = -1; getSerializedSize()19577 public int getSerializedSize() { 19578 int size = memoizedSerializedSize; 19579 if (size != -1) return size; 19580 19581 size = 0; 19582 { 19583 int dataSize = 0; 19584 for (int i = 0; i < cellsPerResult_.size(); i++) { 19585 dataSize += com.google.protobuf.CodedOutputStream 19586 .computeUInt32SizeNoTag(cellsPerResult_.get(i)); 19587 } 19588 size += dataSize; 19589 size += 1 * getCellsPerResultList().size(); 19590 } 19591 if (((bitField0_ & 0x00000001) == 0x00000001)) { 19592 size += com.google.protobuf.CodedOutputStream 19593 .computeUInt64Size(2, scannerId_); 19594 } 19595 if (((bitField0_ & 0x00000002) == 0x00000002)) { 19596 size += com.google.protobuf.CodedOutputStream 19597 .computeBoolSize(3, moreResults_); 19598 } 19599 if (((bitField0_ & 0x00000004) == 0x00000004)) { 19600 size += com.google.protobuf.CodedOutputStream 19601 .computeUInt32Size(4, ttl_); 19602 } 19603 for (int i = 0; i < results_.size(); i++) { 19604 size += com.google.protobuf.CodedOutputStream 19605 .computeMessageSize(5, results_.get(i)); 19606 } 19607 if (((bitField0_ & 0x00000008) == 0x00000008)) { 19608 size += com.google.protobuf.CodedOutputStream 19609 .computeBoolSize(6, stale_); 19610 } 19611 { 19612 int dataSize = 0; 19613 dataSize = 1 * getPartialFlagPerResultList().size(); 19614 size += dataSize; 19615 size += 1 * getPartialFlagPerResultList().size(); 19616 } 19617 if (((bitField0_ & 0x00000010) == 0x00000010)) { 19618 size += com.google.protobuf.CodedOutputStream 19619 .computeBoolSize(8, moreResultsInRegion_); 19620 } 19621 if (((bitField0_ & 0x00000020) == 0x00000020)) { 19622 
size += com.google.protobuf.CodedOutputStream 19623 .computeBoolSize(9, heartbeatMessage_); 19624 } 19625 if (((bitField0_ & 0x00000040) == 0x00000040)) { 19626 size += com.google.protobuf.CodedOutputStream 19627 .computeMessageSize(10, scanMetrics_); 19628 } 19629 size += getUnknownFields().getSerializedSize(); 19630 memoizedSerializedSize = size; 19631 return size; 19632 } 19633 19634 private static final long serialVersionUID = 0L; 19635 @java.lang.Override writeReplace()19636 protected java.lang.Object writeReplace() 19637 throws java.io.ObjectStreamException { 19638 return super.writeReplace(); 19639 } 19640 19641 @java.lang.Override equals(final java.lang.Object obj)19642 public boolean equals(final java.lang.Object obj) { 19643 if (obj == this) { 19644 return true; 19645 } 19646 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse)) { 19647 return super.equals(obj); 19648 } 19649 org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse) obj; 19650 19651 boolean result = true; 19652 result = result && getCellsPerResultList() 19653 .equals(other.getCellsPerResultList()); 19654 result = result && (hasScannerId() == other.hasScannerId()); 19655 if (hasScannerId()) { 19656 result = result && (getScannerId() 19657 == other.getScannerId()); 19658 } 19659 result = result && (hasMoreResults() == other.hasMoreResults()); 19660 if (hasMoreResults()) { 19661 result = result && (getMoreResults() 19662 == other.getMoreResults()); 19663 } 19664 result = result && (hasTtl() == other.hasTtl()); 19665 if (hasTtl()) { 19666 result = result && (getTtl() 19667 == other.getTtl()); 19668 } 19669 result = result && getResultsList() 19670 .equals(other.getResultsList()); 19671 result = result && (hasStale() == other.hasStale()); 19672 if (hasStale()) { 19673 result = result && (getStale() 19674 == other.getStale()); 19675 } 19676 result = result && 
getPartialFlagPerResultList() 19677 .equals(other.getPartialFlagPerResultList()); 19678 result = result && (hasMoreResultsInRegion() == other.hasMoreResultsInRegion()); 19679 if (hasMoreResultsInRegion()) { 19680 result = result && (getMoreResultsInRegion() 19681 == other.getMoreResultsInRegion()); 19682 } 19683 result = result && (hasHeartbeatMessage() == other.hasHeartbeatMessage()); 19684 if (hasHeartbeatMessage()) { 19685 result = result && (getHeartbeatMessage() 19686 == other.getHeartbeatMessage()); 19687 } 19688 result = result && (hasScanMetrics() == other.hasScanMetrics()); 19689 if (hasScanMetrics()) { 19690 result = result && getScanMetrics() 19691 .equals(other.getScanMetrics()); 19692 } 19693 result = result && 19694 getUnknownFields().equals(other.getUnknownFields()); 19695 return result; 19696 } 19697 19698 private int memoizedHashCode = 0; 19699 @java.lang.Override hashCode()19700 public int hashCode() { 19701 if (memoizedHashCode != 0) { 19702 return memoizedHashCode; 19703 } 19704 int hash = 41; 19705 hash = (19 * hash) + getDescriptorForType().hashCode(); 19706 if (getCellsPerResultCount() > 0) { 19707 hash = (37 * hash) + CELLS_PER_RESULT_FIELD_NUMBER; 19708 hash = (53 * hash) + getCellsPerResultList().hashCode(); 19709 } 19710 if (hasScannerId()) { 19711 hash = (37 * hash) + SCANNER_ID_FIELD_NUMBER; 19712 hash = (53 * hash) + hashLong(getScannerId()); 19713 } 19714 if (hasMoreResults()) { 19715 hash = (37 * hash) + MORE_RESULTS_FIELD_NUMBER; 19716 hash = (53 * hash) + hashBoolean(getMoreResults()); 19717 } 19718 if (hasTtl()) { 19719 hash = (37 * hash) + TTL_FIELD_NUMBER; 19720 hash = (53 * hash) + getTtl(); 19721 } 19722 if (getResultsCount() > 0) { 19723 hash = (37 * hash) + RESULTS_FIELD_NUMBER; 19724 hash = (53 * hash) + getResultsList().hashCode(); 19725 } 19726 if (hasStale()) { 19727 hash = (37 * hash) + STALE_FIELD_NUMBER; 19728 hash = (53 * hash) + hashBoolean(getStale()); 19729 } 19730 if (getPartialFlagPerResultCount() > 0) { 19731 
hash = (37 * hash) + PARTIAL_FLAG_PER_RESULT_FIELD_NUMBER; 19732 hash = (53 * hash) + getPartialFlagPerResultList().hashCode(); 19733 } 19734 if (hasMoreResultsInRegion()) { 19735 hash = (37 * hash) + MORE_RESULTS_IN_REGION_FIELD_NUMBER; 19736 hash = (53 * hash) + hashBoolean(getMoreResultsInRegion()); 19737 } 19738 if (hasHeartbeatMessage()) { 19739 hash = (37 * hash) + HEARTBEAT_MESSAGE_FIELD_NUMBER; 19740 hash = (53 * hash) + hashBoolean(getHeartbeatMessage()); 19741 } 19742 if (hasScanMetrics()) { 19743 hash = (37 * hash) + SCAN_METRICS_FIELD_NUMBER; 19744 hash = (53 * hash) + getScanMetrics().hashCode(); 19745 } 19746 hash = (29 * hash) + getUnknownFields().hashCode(); 19747 memoizedHashCode = hash; 19748 return hash; 19749 } 19750 parseFrom( com.google.protobuf.ByteString data)19751 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( 19752 com.google.protobuf.ByteString data) 19753 throws com.google.protobuf.InvalidProtocolBufferException { 19754 return PARSER.parseFrom(data); 19755 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19756 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( 19757 com.google.protobuf.ByteString data, 19758 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 19759 throws com.google.protobuf.InvalidProtocolBufferException { 19760 return PARSER.parseFrom(data, extensionRegistry); 19761 } parseFrom(byte[] data)19762 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(byte[] data) 19763 throws com.google.protobuf.InvalidProtocolBufferException { 19764 return PARSER.parseFrom(data); 19765 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19766 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( 19767 byte[] data, 19768 com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) 19769 throws com.google.protobuf.InvalidProtocolBufferException { 19770 return PARSER.parseFrom(data, extensionRegistry); 19771 } parseFrom(java.io.InputStream input)19772 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom(java.io.InputStream input) 19773 throws java.io.IOException { 19774 return PARSER.parseFrom(input); 19775 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)19776 public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanResponse parseFrom( 19777 java.io.InputStream input, 19778 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 19779 throws java.io.IOException { 19780 return PARSER.parseFrom(input, extensionRegistry); 19781 } parseDelimitedFrom(java.io.InputStream input)19782