1 // Generated by the protocol buffer compiler. DO NOT EDIT! 2 // source: HBase.proto 3 4 package org.apache.hadoop.hbase.protobuf.generated; 5 6 public final class HBaseProtos { HBaseProtos()7 private HBaseProtos() {} registerAllExtensions( com.google.protobuf.ExtensionRegistry registry)8 public static void registerAllExtensions( 9 com.google.protobuf.ExtensionRegistry registry) { 10 } 11 /** 12 * Protobuf enum {@code CompareType} 13 * 14 * <pre> 15 * Comparison operators 16 * </pre> 17 */ 18 public enum CompareType 19 implements com.google.protobuf.ProtocolMessageEnum { 20 /** 21 * <code>LESS = 0;</code> 22 */ 23 LESS(0, 0), 24 /** 25 * <code>LESS_OR_EQUAL = 1;</code> 26 */ 27 LESS_OR_EQUAL(1, 1), 28 /** 29 * <code>EQUAL = 2;</code> 30 */ 31 EQUAL(2, 2), 32 /** 33 * <code>NOT_EQUAL = 3;</code> 34 */ 35 NOT_EQUAL(3, 3), 36 /** 37 * <code>GREATER_OR_EQUAL = 4;</code> 38 */ 39 GREATER_OR_EQUAL(4, 4), 40 /** 41 * <code>GREATER = 5;</code> 42 */ 43 GREATER(5, 5), 44 /** 45 * <code>NO_OP = 6;</code> 46 */ 47 NO_OP(6, 6), 48 ; 49 50 /** 51 * <code>LESS = 0;</code> 52 */ 53 public static final int LESS_VALUE = 0; 54 /** 55 * <code>LESS_OR_EQUAL = 1;</code> 56 */ 57 public static final int LESS_OR_EQUAL_VALUE = 1; 58 /** 59 * <code>EQUAL = 2;</code> 60 */ 61 public static final int EQUAL_VALUE = 2; 62 /** 63 * <code>NOT_EQUAL = 3;</code> 64 */ 65 public static final int NOT_EQUAL_VALUE = 3; 66 /** 67 * <code>GREATER_OR_EQUAL = 4;</code> 68 */ 69 public static final int GREATER_OR_EQUAL_VALUE = 4; 70 /** 71 * <code>GREATER = 5;</code> 72 */ 73 public static final int GREATER_VALUE = 5; 74 /** 75 * <code>NO_OP = 6;</code> 76 */ 77 public static final int NO_OP_VALUE = 6; 78 79 getNumber()80 public final int getNumber() { return value; } 81 valueOf(int value)82 public static CompareType valueOf(int value) { 83 switch (value) { 84 case 0: return LESS; 85 case 1: return LESS_OR_EQUAL; 86 case 2: return EQUAL; 87 case 3: return NOT_EQUAL; 88 case 4: return GREATER_OR_EQUAL; 89 case 5: return GREATER; 90 case 6: return NO_OP; 91 default: return null; 92 } 93 } 94 95 public static com.google.protobuf.Internal.EnumLiteMap<CompareType> internalGetValueMap()96 internalGetValueMap() { 97 return internalValueMap; 98 } 99 private static com.google.protobuf.Internal.EnumLiteMap<CompareType> 100 internalValueMap = 101 new com.google.protobuf.Internal.EnumLiteMap<CompareType>() { 102 public CompareType findValueByNumber(int number) { 103 return CompareType.valueOf(number); 104 } 105 }; 106 107 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()108 getValueDescriptor() { 109 return getDescriptor().getValues().get(index); 110 } 111 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()112 getDescriptorForType() { 113 return getDescriptor(); 114 } 115 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()116 getDescriptor() { 117 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0); 118 } 119 120 private static final CompareType[] VALUES = values(); 121 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)122 public static CompareType valueOf( 123 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 124 if (desc.getType() != getDescriptor()) { 125 throw new java.lang.IllegalArgumentException( 126 "EnumValueDescriptor is not for this type."); 127 } 128 return VALUES[desc.getIndex()]; 129 } 130 131 private final int index; 132 private final int value; 133 CompareType(int index, int value)134 private CompareType(int index, int value) { 135 this.index = index; 136 this.value = value; 137 } 138 139 // @@protoc_insertion_point(enum_scope:CompareType) 140 } 141 142 /** 143 * Protobuf enum {@code TimeUnit} 144 */ 145 public enum TimeUnit 146 implements com.google.protobuf.ProtocolMessageEnum { 147 /** 148 * <code>NANOSECONDS = 1;</code> 149 */ 150 NANOSECONDS(0, 1), 151 /** 152 * <code>MICROSECONDS = 2;</code> 153 */ 154 MICROSECONDS(1, 2), 155 /** 156 * <code>MILLISECONDS = 3;</code> 157 */ 158 MILLISECONDS(2, 3), 159 /** 160 * <code>SECONDS = 4;</code> 161 */ 162 SECONDS(3, 4), 163 /** 164 * <code>MINUTES = 5;</code> 165 */ 166 MINUTES(4, 5), 167 /** 168 * <code>HOURS = 6;</code> 169 */ 170 HOURS(5, 6), 171 /** 172 * <code>DAYS = 7;</code> 173 */ 174 DAYS(6, 7), 175 ; 176 177 /** 178 * <code>NANOSECONDS = 1;</code> 179 */ 180 public static final int NANOSECONDS_VALUE = 1; 181 /** 182 * <code>MICROSECONDS = 2;</code> 183 */ 184 public static final int MICROSECONDS_VALUE = 2; 185 /** 186 * <code>MILLISECONDS = 3;</code> 187 */ 188 public static final int MILLISECONDS_VALUE = 3; 189 /** 190 * <code>SECONDS = 4;</code> 191 */ 192 public static final int SECONDS_VALUE = 4; 193 /** 194 * <code>MINUTES = 5;</code> 195 */ 196 public static final int MINUTES_VALUE = 5; 197 /** 198 * <code>HOURS = 6;</code> 199 */ 200 public static final int HOURS_VALUE = 6; 201 /** 202 * <code>DAYS = 7;</code> 203 */ 204 public static final int DAYS_VALUE = 7; 205 206 getNumber()207 public final int getNumber() { return value; } 208 valueOf(int value)209 public static TimeUnit valueOf(int value) { 210 switch (value) { 211 case 1: return NANOSECONDS; 212 case 2: return MICROSECONDS; 213 case 3: return MILLISECONDS; 214 case 4: return SECONDS; 215 case 5: return MINUTES; 216 case 6: return HOURS; 217 case 7: return DAYS; 218 default: return null; 219 } 220 } 221 222 public static com.google.protobuf.Internal.EnumLiteMap<TimeUnit> internalGetValueMap()223 internalGetValueMap() { 224 return internalValueMap; 225 } 226 private static com.google.protobuf.Internal.EnumLiteMap<TimeUnit> 227 internalValueMap = 228 new com.google.protobuf.Internal.EnumLiteMap<TimeUnit>() { 229 public TimeUnit findValueByNumber(int number) { 230 return TimeUnit.valueOf(number); 231 } 232 }; 233 234 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()235 getValueDescriptor() { 236 return getDescriptor().getValues().get(index); 237 } 238 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()239 getDescriptorForType() { 240 return getDescriptor(); 241 } 242 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()243 getDescriptor() { 244 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(1); 245 } 246 247 private static final TimeUnit[] VALUES = values(); 248 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)249 public static TimeUnit valueOf( 250 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 251 if (desc.getType() != getDescriptor()) { 252 throw new java.lang.IllegalArgumentException( 253 "EnumValueDescriptor is not for this type."); 254 } 255 return VALUES[desc.getIndex()]; 256 } 257 258 private final int index; 259 private final int value; 260 TimeUnit(int index, int value)261 private TimeUnit(int index, int value) { 262 this.index = index; 263 this.value = value; 264 } 265 266 // @@protoc_insertion_point(enum_scope:TimeUnit) 267 } 268 269 public interface TableNameOrBuilder 270 extends com.google.protobuf.MessageOrBuilder { 271 272 // required bytes namespace = 1; 273 /** 274 * <code>required bytes namespace = 1;</code> 275 */ hasNamespace()276 boolean hasNamespace(); 277 /** 278 * <code>required bytes namespace = 1;</code> 279 */ getNamespace()280 com.google.protobuf.ByteString getNamespace(); 281 282 // required bytes qualifier = 2; 283 /** 284 * <code>required bytes qualifier = 2;</code> 285 */ hasQualifier()286 boolean hasQualifier(); 287 /** 288 * <code>required bytes qualifier = 2;</code> 289 */ getQualifier()290 com.google.protobuf.ByteString getQualifier(); 291 } 292 /** 293 * Protobuf type {@code TableName} 294 * 295 * <pre> 296 ** 297 * Table Name 298 * </pre> 299 */ 300 public static final class TableName extends 301 com.google.protobuf.GeneratedMessage 302 implements TableNameOrBuilder { 303 // Use TableName.newBuilder() to construct. TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder)304 private TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 305 super(builder); 306 this.unknownFields = builder.getUnknownFields(); 307 } TableName(boolean noInit)308 private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 309 310 private static final TableName defaultInstance; getDefaultInstance()311 public static TableName getDefaultInstance() { 312 return defaultInstance; 313 } 314 getDefaultInstanceForType()315 public TableName getDefaultInstanceForType() { 316 return defaultInstance; 317 } 318 319 private final com.google.protobuf.UnknownFieldSet unknownFields; 320 @java.lang.Override 321 public final com.google.protobuf.UnknownFieldSet getUnknownFields()322 getUnknownFields() { 323 return this.unknownFields; 324 } TableName( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)325 private TableName( 326 com.google.protobuf.CodedInputStream input, 327 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 328 throws com.google.protobuf.InvalidProtocolBufferException { 329 initFields(); 330 int mutable_bitField0_ = 0; 331 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 332 com.google.protobuf.UnknownFieldSet.newBuilder(); 333 try { 334 boolean done = false; 335 while (!done) { 336 int tag = input.readTag(); 337 switch (tag) { 338 case 0: 339 done = true; 340 break; 341 default: { 342 if (!parseUnknownField(input, unknownFields, 343 extensionRegistry, tag)) { 344 done = true; 345 } 346 break; 347 } 348 case 10: { 349 bitField0_ |= 0x00000001; 350 namespace_ = input.readBytes(); 351 break; 352 } 353 case 18: { 354 bitField0_ |= 0x00000002; 355 qualifier_ = input.readBytes(); 356 break; 357 } 358 } 359 } 360 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 361 throw e.setUnfinishedMessage(this); 362 } catch (java.io.IOException e) { 363 throw new com.google.protobuf.InvalidProtocolBufferException( 364 e.getMessage()).setUnfinishedMessage(this); 365 } finally { 366 this.unknownFields = unknownFields.build(); 367 makeExtensionsImmutable(); 368 } 369 } 370 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()371 getDescriptor() { 372 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableName_descriptor; 373 } 374 375 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()376 internalGetFieldAccessorTable() { 377 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableName_fieldAccessorTable 378 .ensureFieldAccessorsInitialized( 379 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder.class); 380 } 381 382 public static com.google.protobuf.Parser<TableName> PARSER = 383 new com.google.protobuf.AbstractParser<TableName>() { 384 public TableName parsePartialFrom( 385 com.google.protobuf.CodedInputStream input, 386 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 387 throws com.google.protobuf.InvalidProtocolBufferException { 388 return new TableName(input, extensionRegistry); 389 } 390 }; 391 392 @java.lang.Override getParserForType()393 public com.google.protobuf.Parser<TableName> getParserForType() { 394 return PARSER; 395 } 396 397 private int bitField0_; 398 // required bytes namespace = 1; 399 public static final int NAMESPACE_FIELD_NUMBER = 1; 400 private com.google.protobuf.ByteString namespace_; 401 /** 402 * <code>required bytes namespace = 1;</code> 403 */ hasNamespace()404 public boolean hasNamespace() { 405 return ((bitField0_ & 0x00000001) == 0x00000001); 406 } 407 /** 408 * <code>required bytes namespace = 1;</code> 409 */ getNamespace()410 public com.google.protobuf.ByteString getNamespace() { 411 return namespace_; 412 } 413 414 // required bytes qualifier = 2; 415 public static final int QUALIFIER_FIELD_NUMBER = 2; 416 private com.google.protobuf.ByteString qualifier_; 417 /** 418 * <code>required bytes qualifier = 2;</code> 419 */ hasQualifier()420 public boolean hasQualifier() { 421 return ((bitField0_ & 0x00000002) == 0x00000002); 422 } 423 /** 424 * <code>required bytes qualifier = 2;</code> 425 */ getQualifier()426 public com.google.protobuf.ByteString getQualifier() { 427 return qualifier_; 428 } 429 initFields()430 private void initFields() { 431 namespace_ = com.google.protobuf.ByteString.EMPTY; 432 qualifier_ = com.google.protobuf.ByteString.EMPTY; 433 } 434 private byte memoizedIsInitialized = -1; isInitialized()435 public final boolean isInitialized() { 436 byte isInitialized = memoizedIsInitialized; 437 if (isInitialized != -1) return isInitialized == 1; 438 439 if (!hasNamespace()) { 440 memoizedIsInitialized = 0; 441 return false; 442 } 443 if (!hasQualifier()) { 444 memoizedIsInitialized = 0; 445 return false; 446 } 447 memoizedIsInitialized = 1; 448 return true; 449 } 450 writeTo(com.google.protobuf.CodedOutputStream output)451 public void writeTo(com.google.protobuf.CodedOutputStream output) 452 throws java.io.IOException { 453 getSerializedSize(); 454 if (((bitField0_ & 0x00000001) == 0x00000001)) { 455 output.writeBytes(1, namespace_); 456 } 457 if (((bitField0_ & 0x00000002) == 0x00000002)) { 458 output.writeBytes(2, qualifier_); 459 } 460 getUnknownFields().writeTo(output); 461 } 462 463 private int memoizedSerializedSize = -1; getSerializedSize()464 public int getSerializedSize() { 465 int size = memoizedSerializedSize; 466 if (size != -1) return size; 467 468 size = 0; 469 if (((bitField0_ & 0x00000001) == 0x00000001)) { 470 size += com.google.protobuf.CodedOutputStream 471 .computeBytesSize(1, namespace_); 472 } 473 if (((bitField0_ & 0x00000002) == 0x00000002)) { 474 size += com.google.protobuf.CodedOutputStream 475 .computeBytesSize(2, qualifier_); 476 } 477 size += getUnknownFields().getSerializedSize(); 478 memoizedSerializedSize = size; 479 return size; 480 } 481 482 private static final long serialVersionUID = 0L; 483 @java.lang.Override writeReplace()484 protected java.lang.Object writeReplace() 485 throws java.io.ObjectStreamException { 486 return super.writeReplace(); 487 } 488 489 @java.lang.Override equals(final java.lang.Object obj)490 public boolean equals(final java.lang.Object obj) { 491 if (obj == this) { 492 return true; 493 } 494 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName)) { 495 return super.equals(obj); 496 } 497 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) obj; 498 499 boolean result = true; 500 result = result && (hasNamespace() == other.hasNamespace()); 501 if (hasNamespace()) { 502 result = result && getNamespace() 503 .equals(other.getNamespace()); 504 } 505 result = result && (hasQualifier() == other.hasQualifier()); 506 if (hasQualifier()) { 507 result = result && getQualifier() 508 .equals(other.getQualifier()); 509 } 510 result = result && 511 getUnknownFields().equals(other.getUnknownFields()); 512 return result; 513 } 514 515 private int memoizedHashCode = 0; 516 @java.lang.Override hashCode()517 public int hashCode() { 518 if (memoizedHashCode != 0) { 519 return memoizedHashCode; 520 } 521 int hash = 41; 522 hash = (19 * hash) + getDescriptorForType().hashCode(); 523 if (hasNamespace()) { 524 hash = (37 * hash) + NAMESPACE_FIELD_NUMBER; 525 hash = (53 * hash) + getNamespace().hashCode(); 526 } 527 if (hasQualifier()) { 528 hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; 529 hash = (53 * hash) + getQualifier().hashCode(); 530 } 531 hash = (29 * hash) + getUnknownFields().hashCode(); 532 memoizedHashCode = hash; 533 return hash; 534 } 535 parseFrom( com.google.protobuf.ByteString data)536 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( 537 com.google.protobuf.ByteString data) 538 throws com.google.protobuf.InvalidProtocolBufferException { 539 return PARSER.parseFrom(data); 540 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)541 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( 542 com.google.protobuf.ByteString data, 543 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 544 throws com.google.protobuf.InvalidProtocolBufferException { 545 return PARSER.parseFrom(data, extensionRegistry); 546 } parseFrom(byte[] data)547 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(byte[] data) 548 throws com.google.protobuf.InvalidProtocolBufferException { 549 return PARSER.parseFrom(data); 550 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)551 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( 552 byte[] data, 553 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 554 throws com.google.protobuf.InvalidProtocolBufferException { 555 return PARSER.parseFrom(data, extensionRegistry); 556 } parseFrom(java.io.InputStream input)557 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom(java.io.InputStream input) 558 throws java.io.IOException { 559 return PARSER.parseFrom(input); 560 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)561 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( 562 java.io.InputStream input, 563 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 564 throws java.io.IOException { 565 return PARSER.parseFrom(input, extensionRegistry); 566 } parseDelimitedFrom(java.io.InputStream input)567 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(java.io.InputStream input) 568 throws java.io.IOException { 569 return PARSER.parseDelimitedFrom(input); 570 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)571 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom( 572 java.io.InputStream input, 573 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 574 throws java.io.IOException { 575 return PARSER.parseDelimitedFrom(input, extensionRegistry); 576 } parseFrom( com.google.protobuf.CodedInputStream input)577 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( 578 com.google.protobuf.CodedInputStream input) 579 throws java.io.IOException { 580 return PARSER.parseFrom(input); 581 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)582 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parseFrom( 583 com.google.protobuf.CodedInputStream input, 584 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 585 throws java.io.IOException { 586 return PARSER.parseFrom(input, extensionRegistry); 587 } 588 newBuilder()589 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()590 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName prototype)591 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName prototype) { 592 return newBuilder().mergeFrom(prototype); 593 } toBuilder()594 public Builder toBuilder() { return newBuilder(this); } 595 596 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)597 protected Builder newBuilderForType( 598 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 599 Builder builder = new Builder(parent); 600 return builder; 601 } 602 /** 603 * Protobuf type {@code TableName} 604 * 605 * <pre> 606 ** 607 * Table Name 608 * </pre> 609 */ 610 public static final class Builder extends 611 com.google.protobuf.GeneratedMessage.Builder<Builder> 612 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder { 613 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()614 getDescriptor() { 615 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableName_descriptor; 616 } 617 618 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()619 internalGetFieldAccessorTable() { 620 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableName_fieldAccessorTable 621 .ensureFieldAccessorsInitialized( 622 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder.class); 623 } 624 625 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder() Builder()626 private Builder() { 627 maybeForceBuilderInitialization(); 628 } 629 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)630 private Builder( 631 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 632 super(parent); 633 maybeForceBuilderInitialization(); 634 } maybeForceBuilderInitialization()635 private void maybeForceBuilderInitialization() { 636 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 637 } 638 } create()639 private static Builder create() { 640 return new Builder(); 641 } 642 clear()643 public Builder clear() { 644 super.clear(); 645 namespace_ = com.google.protobuf.ByteString.EMPTY; 646 bitField0_ = (bitField0_ & ~0x00000001); 647 qualifier_ = com.google.protobuf.ByteString.EMPTY; 648 bitField0_ = (bitField0_ & ~0x00000002); 649 return this; 650 } 651 clone()652 public Builder clone() { 653 return create().mergeFrom(buildPartial()); 654 } 655 656 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()657 getDescriptorForType() { 658 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableName_descriptor; 659 } 660 getDefaultInstanceForType()661 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getDefaultInstanceForType() { 662 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 663 } 664 build()665 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName build() { 666 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName result = buildPartial(); 667 if (!result.isInitialized()) { 668 throw newUninitializedMessageException(result); 669 } 670 return result; 671 } 672 buildPartial()673 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName buildPartial() { 674 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName(this); 675 int from_bitField0_ = bitField0_; 676 int to_bitField0_ = 0; 677 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 678 to_bitField0_ |= 0x00000001; 679 } 680 result.namespace_ = namespace_; 681 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 682 to_bitField0_ |= 0x00000002; 683 } 684 result.qualifier_ = qualifier_; 685 result.bitField0_ = to_bitField0_; 686 onBuilt(); 687 return result; 688 } 689 mergeFrom(com.google.protobuf.Message other)690 public Builder mergeFrom(com.google.protobuf.Message other) { 691 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) { 692 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName)other); 693 } else { 694 super.mergeFrom(other); 695 return this; 696 } 697 } 698 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other)699 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName other) { 700 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) return this; 701 if (other.hasNamespace()) { 702 setNamespace(other.getNamespace()); 703 } 704 if (other.hasQualifier()) { 705 setQualifier(other.getQualifier()); 706 } 707 this.mergeUnknownFields(other.getUnknownFields()); 708 return this; 709 } 710 isInitialized()711 public final boolean isInitialized() { 712 if (!hasNamespace()) { 713 714 return false; 715 } 716 if (!hasQualifier()) { 717 718 return false; 719 } 720 return true; 721 } 722 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)723 public Builder mergeFrom( 724 com.google.protobuf.CodedInputStream input, 725 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 726 throws java.io.IOException { 727 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName parsedMessage = null; 728 try { 729 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 730 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 731 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName) e.getUnfinishedMessage(); 732 throw e; 733 } finally { 734 if (parsedMessage != null) { 735 mergeFrom(parsedMessage); 736 } 737 } 738 return this; 739 } 740 private int bitField0_; 741 742 // required bytes namespace = 1; 743 private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY; 744 /** 745 * <code>required bytes namespace = 1;</code> 746 */ hasNamespace()747 public boolean hasNamespace() { 748 return ((bitField0_ & 0x00000001) == 0x00000001); 749 } 750 /** 751 * <code>required bytes namespace = 1;</code> 752 */ getNamespace()753 public com.google.protobuf.ByteString getNamespace() { 754 return namespace_; 755 } 756 /** 757 * <code>required bytes namespace = 1;</code> 758 */ setNamespace(com.google.protobuf.ByteString value)759 public Builder setNamespace(com.google.protobuf.ByteString value) { 760 if (value == null) { 761 throw new NullPointerException(); 762 } 763 bitField0_ |= 0x00000001; 764 namespace_ = value; 765 onChanged(); 766 return this; 767 } 768 /** 769 * <code>required bytes namespace = 1;</code> 770 */ clearNamespace()771 public Builder clearNamespace() { 772 bitField0_ = (bitField0_ & ~0x00000001); 773 namespace_ = getDefaultInstance().getNamespace(); 774 onChanged(); 775 return this; 776 } 777 778 // required bytes qualifier = 2; 779 private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; 780 /** 781 * <code>required bytes qualifier = 2;</code> 782 */ hasQualifier()783 public boolean hasQualifier() { 784 return ((bitField0_ & 0x00000002) == 0x00000002); 785 } 786 /** 787 * <code>required bytes qualifier = 2;</code> 788 */ getQualifier()789 public com.google.protobuf.ByteString getQualifier() { 790 return qualifier_; 791 } 792 /** 793 * <code>required bytes qualifier = 2;</code> 794 */ setQualifier(com.google.protobuf.ByteString value)795 public Builder setQualifier(com.google.protobuf.ByteString value) { 796 if (value == null) { 797 throw new NullPointerException(); 798 } 799 bitField0_ |= 0x00000002; 800 qualifier_ = value; 801 onChanged(); 802 return this; 803 } 804 /** 805 * <code>required bytes qualifier = 2;</code> 806 */ clearQualifier()807 public Builder clearQualifier() { 808 bitField0_ = (bitField0_ & ~0x00000002); 809 qualifier_ = getDefaultInstance().getQualifier(); 810 onChanged(); 811 return this; 812 } 813 814 // @@protoc_insertion_point(builder_scope:TableName) 815 } 816 817 static { 818 defaultInstance = new TableName(true); defaultInstance.initFields()819 defaultInstance.initFields(); 820 } 821 822 // @@protoc_insertion_point(class_scope:TableName) 823 } 824 825 public interface TableSchemaOrBuilder 826 extends com.google.protobuf.MessageOrBuilder { 827 828 // optional .TableName table_name = 1; 829 /** 830 * <code>optional .TableName table_name = 1;</code> 831 */ hasTableName()832 boolean hasTableName(); 833 /** 834 * <code>optional .TableName table_name = 1;</code> 835 */ getTableName()836 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); 837 /** 838 * <code>optional .TableName table_name = 1;</code> 839 */ getTableNameOrBuilder()840 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); 841 842 // repeated .BytesBytesPair attributes = 2; 843 /** 844 * <code>repeated .BytesBytesPair attributes = 2;</code> 845 */ 846 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList()847 getAttributesList(); 848 /** 849 * <code>repeated .BytesBytesPair attributes = 2;</code> 850 */ getAttributes(int index)851 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); 852 /** 853 * <code>repeated .BytesBytesPair attributes = 2;</code> 854 */ getAttributesCount()855 int getAttributesCount(); 856 /** 857 * <code>repeated .BytesBytesPair attributes = 2;</code> 858 */ 859 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList()860 getAttributesOrBuilderList(); 861 /** 862 * <code>repeated .BytesBytesPair attributes = 2;</code> 863 */ getAttributesOrBuilder( int index)864 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( 865 int index); 866 867 // repeated .ColumnFamilySchema column_families = 3; 868 /** 869 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 870 */ 871 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList()872 getColumnFamiliesList(); 873 /** 874 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 875 */ getColumnFamilies(int index)876 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index); 877 /** 878 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 879 */ getColumnFamiliesCount()880 int getColumnFamiliesCount(); 881 /** 882 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 883 */ 884 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList()885 getColumnFamiliesOrBuilderList(); 886 /** 887 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 888 */ getColumnFamiliesOrBuilder( int index)889 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( 890 int index); 891 892 // repeated .NameStringPair configuration = 4; 893 /** 894 * <code>repeated .NameStringPair configuration = 4;</code> 895 */ 896 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList()897 getConfigurationList(); 898 /** 899 * <code>repeated .NameStringPair configuration = 4;</code> 900 */ getConfiguration(int index)901 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); 902 /** 903 * <code>repeated .NameStringPair configuration = 4;</code> 904 */ getConfigurationCount()905 int getConfigurationCount(); 906 /** 907 * <code>repeated .NameStringPair configuration = 4;</code> 908 */ 909 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()910 getConfigurationOrBuilderList(); 911 /** 912 * <code>repeated .NameStringPair configuration = 4;</code> 913 */ getConfigurationOrBuilder( int index)914 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 915 int index); 916 } 917 /** 918 * Protobuf type {@code TableSchema} 919 * 920 * <pre> 921 ** 922 * Table Schema 923 * Inspired by the rest TableSchema 924 * </pre> 925 */ 926 public static final class TableSchema extends 927 com.google.protobuf.GeneratedMessage 928 implements TableSchemaOrBuilder { 929 // Use TableSchema.newBuilder() to construct. TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder)930 private TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 931 super(builder); 932 this.unknownFields = builder.getUnknownFields(); 933 } TableSchema(boolean noInit)934 private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 935 936 private static final TableSchema defaultInstance; getDefaultInstance()937 public static TableSchema getDefaultInstance() { 938 return defaultInstance; 939 } 940 getDefaultInstanceForType()941 public TableSchema getDefaultInstanceForType() { 942 return defaultInstance; 943 } 944 945 private final com.google.protobuf.UnknownFieldSet unknownFields; 946 @java.lang.Override 947 public final com.google.protobuf.UnknownFieldSet getUnknownFields()948 getUnknownFields() { 949 return this.unknownFields; 950 } TableSchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)951 private TableSchema( 952 com.google.protobuf.CodedInputStream input, 953 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 954 throws com.google.protobuf.InvalidProtocolBufferException { 955 initFields(); 956 int mutable_bitField0_ = 0; 957 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 958 com.google.protobuf.UnknownFieldSet.newBuilder(); 959 try { 960 boolean done = false; 961 while (!done) { 962 int tag = input.readTag(); 963 switch (tag) { 964 case 0: 965 done = true; 966 break; 967 default: { 968 if (!parseUnknownField(input, unknownFields, 969 extensionRegistry, tag)) { 970 done = true; 971 } 972 break; 973 } 974 case 10: { 975 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; 976 if (((bitField0_ & 0x00000001) == 0x00000001)) { 977 subBuilder = tableName_.toBuilder(); 978 } 979 tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); 980 if (subBuilder != null) { 981 subBuilder.mergeFrom(tableName_); 982 tableName_ = subBuilder.buildPartial(); 983 } 984 bitField0_ |= 0x00000001; 985 break; 986 } 987 case 18: { 988 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 989 attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(); 990 mutable_bitField0_ |= 0x00000002; 991 } 992 attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); 993 break; 994 } 995 case 26: { 996 if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 997 columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>(); 998 mutable_bitField0_ |= 0x00000004; 999 } 1000 columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry)); 1001 break; 1002 } 1003 case 34: { 1004 if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { 1005 configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); 1006 mutable_bitField0_ |= 0x00000008; 1007 } 1008 configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); 1009 break; 1010 } 1011 } 1012 } 1013 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1014 throw e.setUnfinishedMessage(this); 1015 } catch (java.io.IOException e) { 1016 throw new com.google.protobuf.InvalidProtocolBufferException( 1017 e.getMessage()).setUnfinishedMessage(this); 1018 } finally { 1019 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 1020 attributes_ = java.util.Collections.unmodifiableList(attributes_); 1021 } 1022 if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 1023 columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); 1024 } 1025 if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { 1026 configuration_ = java.util.Collections.unmodifiableList(configuration_); 1027 } 1028 this.unknownFields = unknownFields.build(); 1029 makeExtensionsImmutable(); 1030 } 1031 } 1032 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()1033 getDescriptor() { 1034 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; 1035 } 1036 1037 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()1038 internalGetFieldAccessorTable() { 1039 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable 1040 .ensureFieldAccessorsInitialized( 1041 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); 1042 } 1043 1044 public static com.google.protobuf.Parser<TableSchema> PARSER = 1045 new com.google.protobuf.AbstractParser<TableSchema>() { 1046 public TableSchema parsePartialFrom( 1047 com.google.protobuf.CodedInputStream input, 1048 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1049 throws com.google.protobuf.InvalidProtocolBufferException { 1050 return new TableSchema(input, extensionRegistry); 1051 } 1052 }; 1053 1054 @java.lang.Override getParserForType()1055 public com.google.protobuf.Parser<TableSchema> getParserForType() { 1056 return PARSER; 1057 } 1058 1059 private int bitField0_; 1060 // optional .TableName table_name = 1; 1061 public static final int TABLE_NAME_FIELD_NUMBER = 1; 1062 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; 1063 /** 1064 * <code>optional .TableName table_name = 1;</code> 1065 */ hasTableName()1066 public boolean hasTableName() { 1067 return ((bitField0_ & 0x00000001) == 0x00000001); 1068 } 1069 /** 1070 * <code>optional .TableName table_name = 1;</code> 1071 */ getTableName()1072 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { 1073 return tableName_; 1074 } 1075 /** 1076 * <code>optional .TableName table_name = 1;</code> 1077 */ getTableNameOrBuilder()1078 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { 1079 return tableName_; 1080 } 1081 1082 // repeated .BytesBytesPair attributes = 2; 1083 public static final int ATTRIBUTES_FIELD_NUMBER = 2; 1084 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_; 1085 /** 1086 * <code>repeated .BytesBytesPair attributes = 2;</code> 1087 */ getAttributesList()1088 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { 1089 return attributes_; 1090 } 1091 /** 1092 * <code>repeated .BytesBytesPair attributes = 2;</code> 1093 */ 1094 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList()1095 getAttributesOrBuilderList() { 1096 return attributes_; 1097 } 1098 /** 1099 * <code>repeated .BytesBytesPair attributes = 2;</code> 1100 */ getAttributesCount()1101 public int getAttributesCount() { 1102 return attributes_.size(); 1103 } 1104 /** 1105 * <code>repeated .BytesBytesPair attributes = 2;</code> 1106 */ getAttributes(int index)1107 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { 1108 return attributes_.get(index); 1109 } 1110 /** 1111 * <code>repeated .BytesBytesPair attributes = 2;</code> 1112 */ getAttributesOrBuilder( int index)1113 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( 1114 int index) { 1115 return attributes_.get(index); 1116 } 1117 1118 // repeated .ColumnFamilySchema column_families = 3; 1119 public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3; 1120 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_; 1121 /** 1122 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 1123 */ getColumnFamiliesList()1124 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() { 1125 return columnFamilies_; 1126 } 1127 /** 1128 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 1129 */ 1130 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList()1131 getColumnFamiliesOrBuilderList() { 1132 return columnFamilies_; 1133 } 1134 /** 1135 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 1136 */ getColumnFamiliesCount()1137 public int getColumnFamiliesCount() { 1138 return columnFamilies_.size(); 1139 } 1140 /** 1141 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 1142 */ getColumnFamilies(int index)1143 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { 1144 return columnFamilies_.get(index); 1145 } 1146 /** 1147 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 1148 */ getColumnFamiliesOrBuilder( int index)1149 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( 1150 int index) { 1151 return columnFamilies_.get(index); 1152 } 1153 1154 // repeated .NameStringPair configuration = 4; 1155 public static final int CONFIGURATION_FIELD_NUMBER = 4; 1156 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; 1157 /** 1158 * <code>repeated .NameStringPair configuration = 4;</code> 1159 */ getConfigurationList()1160 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { 1161 return configuration_; 1162 } 1163 /** 1164 * <code>repeated .NameStringPair configuration = 4;</code> 1165 */ 1166 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()1167 getConfigurationOrBuilderList() { 1168 return configuration_; 1169 } 1170 /** 1171 * <code>repeated .NameStringPair configuration = 4;</code> 1172 */ getConfigurationCount()1173 public int getConfigurationCount() { 1174 return configuration_.size(); 1175 } 1176 /** 1177 * <code>repeated .NameStringPair configuration = 4;</code> 1178 */ getConfiguration(int index)1179 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { 1180 return configuration_.get(index); 1181 } 1182 /** 1183 * <code>repeated .NameStringPair configuration = 4;</code> 1184 */ getConfigurationOrBuilder( int index)1185 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 1186 int index) { 1187 return configuration_.get(index); 1188 } 1189 initFields()1190 private void initFields() { 1191 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 1192 attributes_ = java.util.Collections.emptyList(); 1193 columnFamilies_ = java.util.Collections.emptyList(); 1194 configuration_ = java.util.Collections.emptyList(); 1195 } 1196 private byte memoizedIsInitialized = -1; isInitialized()1197 public final boolean isInitialized() { 1198 byte isInitialized = memoizedIsInitialized; 1199 if (isInitialized != -1) return isInitialized == 1; 1200 1201 if (hasTableName()) { 1202 if (!getTableName().isInitialized()) { 1203 memoizedIsInitialized = 0; 1204 return false; 1205 } 1206 } 1207 for (int i = 0; i < getAttributesCount(); i++) { 1208 if (!getAttributes(i).isInitialized()) { 1209 memoizedIsInitialized = 0; 1210 return false; 1211 } 1212 } 1213 for (int i = 0; i < getColumnFamiliesCount(); i++) { 1214 if (!getColumnFamilies(i).isInitialized()) { 1215 memoizedIsInitialized = 0; 1216 return false; 1217 } 1218 } 1219 for (int i = 0; i < getConfigurationCount(); i++) { 1220 if (!getConfiguration(i).isInitialized()) { 1221 memoizedIsInitialized = 0; 1222 return false; 1223 } 1224 } 1225 memoizedIsInitialized = 1; 1226 return true; 1227 } 1228 writeTo(com.google.protobuf.CodedOutputStream output)1229 public void writeTo(com.google.protobuf.CodedOutputStream output) 1230 throws java.io.IOException { 1231 getSerializedSize(); 1232 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1233 output.writeMessage(1, tableName_); 1234 } 1235 for (int i = 0; i < attributes_.size(); i++) { 1236 output.writeMessage(2, attributes_.get(i)); 1237 } 1238 for (int i = 0; i < columnFamilies_.size(); i++) { 1239 output.writeMessage(3, columnFamilies_.get(i)); 1240 } 1241 for (int i = 0; i < configuration_.size(); i++) { 1242 output.writeMessage(4, configuration_.get(i)); 1243 } 1244 getUnknownFields().writeTo(output); 1245 } 1246 1247 private int memoizedSerializedSize = -1; getSerializedSize()1248 public int getSerializedSize() { 1249 int size = memoizedSerializedSize; 1250 if (size != -1) return size; 1251 1252 size = 0; 1253 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1254 size += com.google.protobuf.CodedOutputStream 1255 .computeMessageSize(1, tableName_); 1256 } 1257 for (int i = 0; i < attributes_.size(); i++) { 1258 size += com.google.protobuf.CodedOutputStream 1259 .computeMessageSize(2, attributes_.get(i)); 1260 } 1261 for (int i = 0; i < columnFamilies_.size(); i++) { 1262 size += com.google.protobuf.CodedOutputStream 1263 .computeMessageSize(3, columnFamilies_.get(i)); 1264 } 1265 for (int i = 0; i < configuration_.size(); i++) { 1266 size += com.google.protobuf.CodedOutputStream 1267 .computeMessageSize(4, configuration_.get(i)); 1268 } 1269 size += getUnknownFields().getSerializedSize(); 1270 memoizedSerializedSize = size; 1271 return size; 1272 } 1273 1274 private static final long serialVersionUID = 0L; 1275 @java.lang.Override writeReplace()1276 protected java.lang.Object writeReplace() 1277 throws java.io.ObjectStreamException { 1278 return super.writeReplace(); 1279 } 1280 1281 @java.lang.Override equals(final java.lang.Object obj)1282 public boolean equals(final java.lang.Object obj) { 1283 if (obj == this) { 1284 return true; 1285 } 1286 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)) { 1287 return super.equals(obj); 1288 } 1289 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) obj; 1290 1291 boolean result = true; 1292 result = result && (hasTableName() == other.hasTableName()); 1293 if (hasTableName()) { 1294 result = result && getTableName() 1295 .equals(other.getTableName()); 1296 } 1297 result = result && getAttributesList() 1298 .equals(other.getAttributesList()); 1299 result = result && getColumnFamiliesList() 1300 .equals(other.getColumnFamiliesList()); 1301 result = result && getConfigurationList() 1302 .equals(other.getConfigurationList()); 1303 result = result && 1304 getUnknownFields().equals(other.getUnknownFields()); 1305 return result; 1306 } 1307 1308 private int memoizedHashCode = 0; 1309 @java.lang.Override hashCode()1310 public int hashCode() { 1311 if (memoizedHashCode != 0) { 1312 return memoizedHashCode; 1313 } 1314 int hash = 41; 1315 hash = (19 * hash) + getDescriptorForType().hashCode(); 1316 if (hasTableName()) { 1317 hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; 1318 hash = (53 * hash) + getTableName().hashCode(); 1319 } 1320 if (getAttributesCount() > 0) { 1321 hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; 1322 hash = (53 * hash) + getAttributesList().hashCode(); 1323 } 1324 if (getColumnFamiliesCount() > 0) { 1325 hash = (37 * hash) + COLUMN_FAMILIES_FIELD_NUMBER; 1326 hash = (53 * hash) + getColumnFamiliesList().hashCode(); 1327 } 1328 if (getConfigurationCount() > 0) { 1329 hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; 1330 hash = (53 * hash) + getConfigurationList().hashCode(); 1331 } 1332 hash = (29 * hash) + getUnknownFields().hashCode(); 1333 memoizedHashCode = hash; 1334 return hash; 1335 } 1336 parseFrom( com.google.protobuf.ByteString data)1337 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( 1338 com.google.protobuf.ByteString data) 1339 throws com.google.protobuf.InvalidProtocolBufferException { 1340 return PARSER.parseFrom(data); 1341 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1342 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( 1343 com.google.protobuf.ByteString data, 1344 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1345 throws com.google.protobuf.InvalidProtocolBufferException { 1346 return PARSER.parseFrom(data, extensionRegistry); 1347 } parseFrom(byte[] data)1348 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(byte[] data) 1349 throws com.google.protobuf.InvalidProtocolBufferException { 1350 return PARSER.parseFrom(data); 1351 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1352 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( 1353 byte[] data, 1354 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1355 throws com.google.protobuf.InvalidProtocolBufferException { 1356 return PARSER.parseFrom(data, extensionRegistry); 1357 } parseFrom(java.io.InputStream input)1358 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom(java.io.InputStream input) 1359 throws java.io.IOException { 1360 return PARSER.parseFrom(input); 1361 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1362 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( 1363 java.io.InputStream input, 1364 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1365 throws java.io.IOException { 1366 return PARSER.parseFrom(input, extensionRegistry); 1367 } parseDelimitedFrom(java.io.InputStream input)1368 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input) 1369 throws java.io.IOException { 1370 return PARSER.parseDelimitedFrom(input); 1371 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1372 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom( 1373 java.io.InputStream input, 1374 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1375 throws java.io.IOException { 1376 return PARSER.parseDelimitedFrom(input, extensionRegistry); 1377 } parseFrom( com.google.protobuf.CodedInputStream input)1378 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( 1379 com.google.protobuf.CodedInputStream input) 1380 throws java.io.IOException { 1381 return PARSER.parseFrom(input); 1382 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1383 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parseFrom( 1384 com.google.protobuf.CodedInputStream input, 1385 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1386 throws java.io.IOException { 1387 return PARSER.parseFrom(input, extensionRegistry); 1388 } 1389 newBuilder()1390 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()1391 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype)1392 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema prototype) { 1393 return newBuilder().mergeFrom(prototype); 1394 } toBuilder()1395 public Builder toBuilder() { return newBuilder(this); } 1396 1397 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)1398 protected Builder newBuilderForType( 1399 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 1400 Builder builder = new Builder(parent); 1401 return builder; 1402 } 1403 /** 1404 * Protobuf type {@code TableSchema} 1405 * 1406 * <pre> 1407 ** 1408 * Table Schema 1409 * Inspired by the rest TableSchema 1410 * </pre> 1411 */ 1412 public static final class Builder extends 1413 com.google.protobuf.GeneratedMessage.Builder<Builder> 1414 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder { 1415 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()1416 getDescriptor() { 1417 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; 1418 } 1419 1420 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()1421 internalGetFieldAccessorTable() { 1422 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_fieldAccessorTable 1423 .ensureFieldAccessorsInitialized( 1424 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class); 1425 } 1426 1427 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder() Builder()1428 private Builder() { 1429 maybeForceBuilderInitialization(); 1430 } 1431 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)1432 private Builder( 1433 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 1434 super(parent); 1435 maybeForceBuilderInitialization(); 1436 } maybeForceBuilderInitialization()1437 private void maybeForceBuilderInitialization() { 1438 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 1439 getTableNameFieldBuilder(); 1440 getAttributesFieldBuilder(); 1441 getColumnFamiliesFieldBuilder(); 1442 getConfigurationFieldBuilder(); 1443 } 1444 } create()1445 private static Builder create() { 1446 return new Builder(); 1447 } 1448 clear()1449 public Builder clear() { 1450 super.clear(); 1451 if (tableNameBuilder_ == null) { 1452 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 1453 } else { 1454 tableNameBuilder_.clear(); 1455 } 1456 bitField0_ = (bitField0_ & ~0x00000001); 1457 if (attributesBuilder_ == null) { 1458 attributes_ = java.util.Collections.emptyList(); 1459 bitField0_ = (bitField0_ & ~0x00000002); 1460 } else { 1461 attributesBuilder_.clear(); 1462 } 1463 if (columnFamiliesBuilder_ == null) { 1464 columnFamilies_ = java.util.Collections.emptyList(); 1465 bitField0_ = (bitField0_ & ~0x00000004); 1466 } else { 1467 columnFamiliesBuilder_.clear(); 1468 } 1469 if (configurationBuilder_ == null) { 1470 configuration_ = java.util.Collections.emptyList(); 1471 bitField0_ = (bitField0_ & ~0x00000008); 1472 } else { 1473 configurationBuilder_.clear(); 1474 } 1475 return this; 1476 } 1477 clone()1478 public Builder clone() { 1479 return create().mergeFrom(buildPartial()); 1480 } 1481 1482 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()1483 getDescriptorForType() { 1484 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TableSchema_descriptor; 1485 } 1486 getDefaultInstanceForType()1487 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getDefaultInstanceForType() { 1488 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance(); 1489 } 1490 build()1491 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema build() { 1492 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = buildPartial(); 1493 if (!result.isInitialized()) { 1494 throw newUninitializedMessageException(result); 1495 } 1496 return result; 1497 } 1498 buildPartial()1499 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema buildPartial() { 1500 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema(this); 1501 int from_bitField0_ = bitField0_; 1502 int to_bitField0_ = 0; 1503 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 1504 to_bitField0_ |= 0x00000001; 1505 } 1506 if (tableNameBuilder_ == null) { 1507 result.tableName_ = tableName_; 1508 } else { 1509 result.tableName_ = tableNameBuilder_.build(); 1510 } 1511 if (attributesBuilder_ == null) { 1512 if (((bitField0_ & 0x00000002) == 0x00000002)) { 1513 attributes_ = java.util.Collections.unmodifiableList(attributes_); 1514 bitField0_ = (bitField0_ & ~0x00000002); 1515 } 1516 result.attributes_ = attributes_; 1517 } else { 1518 result.attributes_ = attributesBuilder_.build(); 1519 } 1520 if (columnFamiliesBuilder_ == null) { 1521 if (((bitField0_ & 0x00000004) == 0x00000004)) { 1522 columnFamilies_ = java.util.Collections.unmodifiableList(columnFamilies_); 1523 bitField0_ = (bitField0_ & ~0x00000004); 1524 } 1525 result.columnFamilies_ = columnFamilies_; 1526 } else { 1527 result.columnFamilies_ = columnFamiliesBuilder_.build(); 1528 } 1529 if (configurationBuilder_ == null) { 1530 if (((bitField0_ & 0x00000008) == 0x00000008)) { 1531 configuration_ = java.util.Collections.unmodifiableList(configuration_); 1532 bitField0_ = (bitField0_ & ~0x00000008); 1533 } 1534 result.configuration_ = configuration_; 1535 } else { 1536 result.configuration_ = configurationBuilder_.build(); 1537 } 1538 result.bitField0_ = to_bitField0_; 1539 onBuilt(); 1540 return result; 1541 } 1542 mergeFrom(com.google.protobuf.Message other)1543 public Builder mergeFrom(com.google.protobuf.Message other) { 1544 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) { 1545 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema)other); 1546 } else { 1547 super.mergeFrom(other); 1548 return this; 1549 } 1550 } 1551 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other)1552 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema other) { 1553 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) return this; 1554 if (other.hasTableName()) { 1555 mergeTableName(other.getTableName()); 1556 } 1557 if (attributesBuilder_ == null) { 1558 if (!other.attributes_.isEmpty()) { 1559 if (attributes_.isEmpty()) { 1560 attributes_ = other.attributes_; 1561 bitField0_ = (bitField0_ & ~0x00000002); 1562 } else { 1563 ensureAttributesIsMutable(); 1564 attributes_.addAll(other.attributes_); 1565 } 1566 onChanged(); 1567 } 1568 } else { 1569 if (!other.attributes_.isEmpty()) { 1570 if (attributesBuilder_.isEmpty()) { 1571 attributesBuilder_.dispose(); 1572 attributesBuilder_ = null; 1573 attributes_ = other.attributes_; 1574 bitField0_ = (bitField0_ & ~0x00000002); 1575 attributesBuilder_ = 1576 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 1577 getAttributesFieldBuilder() : null; 1578 } else { 1579 attributesBuilder_.addAllMessages(other.attributes_); 1580 } 1581 } 1582 } 1583 if (columnFamiliesBuilder_ == null) { 1584 if (!other.columnFamilies_.isEmpty()) { 1585 if (columnFamilies_.isEmpty()) { 1586 columnFamilies_ = other.columnFamilies_; 1587 bitField0_ = (bitField0_ & ~0x00000004); 1588 } else { 1589 ensureColumnFamiliesIsMutable(); 1590 columnFamilies_.addAll(other.columnFamilies_); 1591 } 1592 onChanged(); 1593 } 1594 } else { 1595 if (!other.columnFamilies_.isEmpty()) { 1596 if (columnFamiliesBuilder_.isEmpty()) { 1597 columnFamiliesBuilder_.dispose(); 1598 columnFamiliesBuilder_ = null; 1599 columnFamilies_ = other.columnFamilies_; 1600 bitField0_ = (bitField0_ & ~0x00000004); 1601 columnFamiliesBuilder_ = 1602 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 1603 getColumnFamiliesFieldBuilder() : null; 1604 } else { 1605 columnFamiliesBuilder_.addAllMessages(other.columnFamilies_); 1606 } 1607 } 1608 } 1609 if (configurationBuilder_ == null) { 1610 if (!other.configuration_.isEmpty()) { 1611 if (configuration_.isEmpty()) { 1612 configuration_ = other.configuration_; 1613 bitField0_ = (bitField0_ & ~0x00000008); 1614 } else { 1615 ensureConfigurationIsMutable(); 1616 configuration_.addAll(other.configuration_); 1617 } 1618 onChanged(); 1619 } 1620 } else { 1621 if (!other.configuration_.isEmpty()) { 1622 if (configurationBuilder_.isEmpty()) { 1623 configurationBuilder_.dispose(); 1624 configurationBuilder_ = null; 1625 configuration_ = other.configuration_; 1626 bitField0_ = (bitField0_ & ~0x00000008); 1627 configurationBuilder_ = 1628 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 1629 getConfigurationFieldBuilder() : null; 1630 } else { 1631 configurationBuilder_.addAllMessages(other.configuration_); 1632 } 1633 } 1634 } 1635 this.mergeUnknownFields(other.getUnknownFields()); 1636 return this; 1637 } 1638 isInitialized()1639 public final boolean isInitialized() { 1640 if (hasTableName()) { 1641 if (!getTableName().isInitialized()) { 1642 1643 return false; 1644 } 1645 } 1646 for (int i = 0; i < getAttributesCount(); i++) { 1647 if (!getAttributes(i).isInitialized()) { 1648 1649 return false; 1650 } 1651 } 1652 for (int i = 0; i < getColumnFamiliesCount(); i++) { 1653 if (!getColumnFamilies(i).isInitialized()) { 1654 1655 return false; 1656 } 1657 } 1658 for (int i = 0; i < getConfigurationCount(); i++) { 1659 if (!getConfiguration(i).isInitialized()) { 1660 1661 return false; 1662 } 1663 } 1664 return true; 1665 } 1666 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1667 public Builder mergeFrom( 1668 com.google.protobuf.CodedInputStream input, 1669 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1670 throws java.io.IOException { 1671 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema parsedMessage = null; 1672 try { 1673 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 1674 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1675 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage(); 1676 throw e; 1677 } finally { 1678 if (parsedMessage != null) { 1679 mergeFrom(parsedMessage); 1680 } 1681 } 1682 return this; 1683 } 1684 private int bitField0_; 1685 1686 // optional .TableName table_name = 1; 1687 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 1688 private com.google.protobuf.SingleFieldBuilder< 1689 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; 1690 /** 1691 * <code>optional .TableName table_name = 1;</code> 1692 */ hasTableName()1693 public boolean hasTableName() { 1694 return ((bitField0_ & 0x00000001) == 0x00000001); 1695 } 1696 /** 1697 * <code>optional .TableName table_name = 1;</code> 1698 */ getTableName()1699 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { 1700 if (tableNameBuilder_ == null) { 1701 return tableName_; 1702 } else { 1703 return tableNameBuilder_.getMessage(); 1704 } 1705 } 1706 /** 1707 * <code>optional .TableName table_name = 1;</code> 1708 */ setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)1709 public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { 1710 if (tableNameBuilder_ == null) { 1711 if (value == null) { 1712 throw new NullPointerException(); 1713 } 1714 tableName_ = value; 1715 onChanged(); 1716 } else { 1717 tableNameBuilder_.setMessage(value); 1718 } 1719 bitField0_ |= 0x00000001; 1720 return this; 1721 } 1722 /** 1723 * <code>optional .TableName table_name = 1;</code> 1724 */ setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)1725 public Builder setTableName( 1726 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { 1727 if (tableNameBuilder_ == null) { 1728 tableName_ = builderForValue.build(); 1729 onChanged(); 1730 } else { 1731 tableNameBuilder_.setMessage(builderForValue.build()); 1732 } 1733 bitField0_ |= 0x00000001; 1734 return this; 1735 } 1736 /** 1737 * <code>optional .TableName table_name = 1;</code> 1738 */ mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)1739 public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { 1740 if (tableNameBuilder_ == null) { 1741 if (((bitField0_ & 0x00000001) == 0x00000001) && 1742 tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { 1743 tableName_ = 1744 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); 1745 } else { 1746 tableName_ = value; 1747 } 1748 onChanged(); 1749 } else { 1750 tableNameBuilder_.mergeFrom(value); 1751 } 1752 bitField0_ |= 0x00000001; 1753 return this; 1754 } 1755 /** 1756 * <code>optional .TableName table_name = 1;</code> 1757 */ clearTableName()1758 public Builder clearTableName() { 1759 if (tableNameBuilder_ == null) { 1760 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 1761 onChanged(); 1762 } else { 1763 tableNameBuilder_.clear(); 1764 } 1765 bitField0_ = (bitField0_ & ~0x00000001); 1766 return this; 1767 } 1768 /** 1769 * <code>optional .TableName table_name = 1;</code> 1770 */ getTableNameBuilder()1771 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { 1772 bitField0_ |= 0x00000001; 1773 onChanged(); 1774 return getTableNameFieldBuilder().getBuilder(); 1775 } 1776 /** 1777 * <code>optional .TableName table_name = 1;</code> 1778 */ getTableNameOrBuilder()1779 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { 1780 if (tableNameBuilder_ != null) { 1781 return tableNameBuilder_.getMessageOrBuilder(); 1782 } else { 1783 return tableName_; 1784 } 1785 } 1786 /** 1787 * <code>optional .TableName table_name = 1;</code> 1788 */ 1789 private com.google.protobuf.SingleFieldBuilder< 1790 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder()1791 getTableNameFieldBuilder() { 1792 if (tableNameBuilder_ == null) { 1793 tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< 1794 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( 1795 tableName_, 1796 getParentForChildren(), 1797 isClean()); 1798 tableName_ = null; 1799 } 1800 return tableNameBuilder_; 1801 } 1802 1803 // repeated .BytesBytesPair attributes = 2; 1804 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ = 1805 java.util.Collections.emptyList(); ensureAttributesIsMutable()1806 private void ensureAttributesIsMutable() { 1807 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 1808 attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_); 1809 bitField0_ |= 0x00000002; 1810 } 1811 } 1812 1813 private com.google.protobuf.RepeatedFieldBuilder< 1814 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; 1815 1816 /** 1817 * <code>repeated .BytesBytesPair attributes = 2;</code> 1818 */ getAttributesList()1819 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { 1820 if (attributesBuilder_ == null) { 1821 return java.util.Collections.unmodifiableList(attributes_); 1822 } else { 1823 return attributesBuilder_.getMessageList(); 1824 } 1825 } 1826 /** 1827 * <code>repeated .BytesBytesPair attributes = 2;</code> 1828 */ getAttributesCount()1829 public int getAttributesCount() { 1830 if (attributesBuilder_ == null) { 1831 return attributes_.size(); 1832 } else { 1833 return attributesBuilder_.getCount(); 1834 } 1835 } 1836 /** 1837 * <code>repeated .BytesBytesPair attributes = 2;</code> 1838 */ getAttributes(int index)1839 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { 1840 if (attributesBuilder_ == null) { 1841 return attributes_.get(index); 1842 } else { 1843 return attributesBuilder_.getMessage(index); 1844 } 1845 } 1846 /** 1847 * <code>repeated .BytesBytesPair attributes = 2;</code> 1848 */ setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)1849 public Builder setAttributes( 1850 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 1851 if (attributesBuilder_ == null) { 1852 if (value == null) { 1853 throw new NullPointerException(); 1854 } 1855 ensureAttributesIsMutable(); 1856 attributes_.set(index, value); 1857 onChanged(); 1858 } else { 1859 attributesBuilder_.setMessage(index, value); 1860 } 1861 return this; 1862 } 1863 /** 1864 * <code>repeated .BytesBytesPair attributes = 2;</code> 1865 */ setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)1866 public Builder setAttributes( 1867 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 1868 if (attributesBuilder_ == null) { 1869 ensureAttributesIsMutable(); 1870 attributes_.set(index, builderForValue.build()); 1871 onChanged(); 1872 } else { 1873 attributesBuilder_.setMessage(index, builderForValue.build()); 1874 } 1875 return this; 1876 } 1877 /** 1878 * <code>repeated .BytesBytesPair attributes = 2;</code> 1879 */ addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)1880 public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 1881 if (attributesBuilder_ == null) { 1882 if (value == null) { 1883 throw new NullPointerException(); 1884 } 1885 ensureAttributesIsMutable(); 1886 attributes_.add(value); 1887 onChanged(); 1888 } else { 1889 attributesBuilder_.addMessage(value); 1890 } 1891 return this; 1892 } 1893 /** 1894 * <code>repeated .BytesBytesPair attributes = 2;</code> 1895 */ addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)1896 public Builder addAttributes( 1897 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 1898 if (attributesBuilder_ == null) { 1899 if (value == null) { 1900 throw new NullPointerException(); 1901 } 1902 ensureAttributesIsMutable(); 1903 attributes_.add(index, value); 1904 onChanged(); 1905 } else { 1906 attributesBuilder_.addMessage(index, value); 1907 } 1908 return this; 1909 } 1910 /** 1911 * <code>repeated .BytesBytesPair attributes = 2;</code> 1912 */ addAttributes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)1913 public Builder addAttributes( 1914 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 1915 if (attributesBuilder_ == null) { 1916 ensureAttributesIsMutable(); 1917 attributes_.add(builderForValue.build()); 1918 onChanged(); 1919 } else { 1920 attributesBuilder_.addMessage(builderForValue.build()); 1921 } 1922 return this; 1923 } 1924 /** 1925 * <code>repeated .BytesBytesPair attributes = 2;</code> 1926 */ addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)1927 public Builder addAttributes( 1928 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 1929 if (attributesBuilder_ == null) { 1930 ensureAttributesIsMutable(); 1931 attributes_.add(index, builderForValue.build()); 1932 onChanged(); 1933 } else { 1934 attributesBuilder_.addMessage(index, builderForValue.build()); 1935 } 1936 return this; 1937 } 1938 /** 1939 * <code>repeated .BytesBytesPair attributes = 2;</code> 1940 */ addAllAttributes( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values)1941 public Builder addAllAttributes( 1942 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) { 1943 if (attributesBuilder_ == null) { 1944 ensureAttributesIsMutable(); 1945 super.addAll(values, attributes_); 1946 onChanged(); 1947 } else { 1948 attributesBuilder_.addAllMessages(values); 1949 } 1950 return this; 1951 } 1952 /** 1953 * <code>repeated .BytesBytesPair attributes = 2;</code> 1954 */ clearAttributes()1955 public Builder clearAttributes() { 1956 if (attributesBuilder_ == null) { 1957 attributes_ = java.util.Collections.emptyList(); 1958 bitField0_ = (bitField0_ & ~0x00000002); 1959 onChanged(); 1960 } else { 1961 attributesBuilder_.clear(); 1962 } 1963 return this; 1964 } 1965 /** 1966 * <code>repeated .BytesBytesPair attributes = 2;</code> 1967 */ removeAttributes(int index)1968 public Builder removeAttributes(int index) { 1969 if (attributesBuilder_ == null) { 1970 ensureAttributesIsMutable(); 1971 attributes_.remove(index); 1972 onChanged(); 1973 } else { 1974 attributesBuilder_.remove(index); 1975 } 1976 return this; 1977 } 1978 /** 1979 * <code>repeated .BytesBytesPair attributes = 2;</code> 1980 */ getAttributesBuilder( int index)1981 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( 1982 int index) { 1983 return getAttributesFieldBuilder().getBuilder(index); 1984 } 1985 /** 1986 * <code>repeated .BytesBytesPair attributes = 2;</code> 1987 */ getAttributesOrBuilder( int index)1988 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( 1989 int index) { 1990 if (attributesBuilder_ == null) { 1991 return attributes_.get(index); } else { 1992 return attributesBuilder_.getMessageOrBuilder(index); 1993 } 1994 } 1995 /** 1996 * <code>repeated .BytesBytesPair attributes = 2;</code> 1997 */ 1998 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList()1999 getAttributesOrBuilderList() { 2000 if (attributesBuilder_ != null) { 2001 return attributesBuilder_.getMessageOrBuilderList(); 2002 } else { 2003 return java.util.Collections.unmodifiableList(attributes_); 2004 } 2005 } 2006 /** 2007 * <code>repeated .BytesBytesPair attributes = 2;</code> 2008 */ addAttributesBuilder()2009 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { 2010 return getAttributesFieldBuilder().addBuilder( 2011 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); 2012 } 2013 /** 2014 * <code>repeated .BytesBytesPair attributes = 2;</code> 2015 */ addAttributesBuilder( int index)2016 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( 2017 int index) { 2018 return getAttributesFieldBuilder().addBuilder( 2019 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); 2020 } 2021 /** 2022 * <code>repeated .BytesBytesPair attributes = 2;</code> 2023 */ 2024 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getAttributesBuilderList()2025 getAttributesBuilderList() { 2026 return getAttributesFieldBuilder().getBuilderList(); 2027 } 2028 private com.google.protobuf.RepeatedFieldBuilder< 2029 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder()2030 getAttributesFieldBuilder() { 2031 if (attributesBuilder_ == null) { 2032 attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 2033 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( 2034 attributes_, 2035 ((bitField0_ & 0x00000002) == 0x00000002), 2036 getParentForChildren(), 2037 isClean()); 2038 attributes_ = null; 2039 } 2040 return attributesBuilder_; 2041 } 2042 2043 // repeated .ColumnFamilySchema column_families = 3; 2044 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_ = 2045 java.util.Collections.emptyList(); ensureColumnFamiliesIsMutable()2046 private void ensureColumnFamiliesIsMutable() { 2047 if (!((bitField0_ & 0x00000004) == 0x00000004)) { 2048 columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema>(columnFamilies_); 2049 bitField0_ |= 0x00000004; 2050 } 2051 } 2052 2053 private com.google.protobuf.RepeatedFieldBuilder< 2054 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_; 2055 2056 /** 2057 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2058 */ getColumnFamiliesList()2059 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> getColumnFamiliesList() { 2060 if (columnFamiliesBuilder_ == null) { 2061 return java.util.Collections.unmodifiableList(columnFamilies_); 2062 } else { 2063 return columnFamiliesBuilder_.getMessageList(); 2064 } 2065 } 2066 /** 2067 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2068 */ getColumnFamiliesCount()2069 public int getColumnFamiliesCount() { 2070 if (columnFamiliesBuilder_ == null) { 2071 return columnFamilies_.size(); 2072 } else { 2073 return columnFamiliesBuilder_.getCount(); 2074 } 2075 } 2076 /** 2077 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2078 */ getColumnFamilies(int index)2079 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnFamilies(int index) { 2080 if (columnFamiliesBuilder_ == null) { 2081 return columnFamilies_.get(index); 2082 } else { 2083 return columnFamiliesBuilder_.getMessage(index); 2084 } 2085 } 2086 /** 2087 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2088 */ setColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value)2089 public Builder setColumnFamilies( 2090 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { 2091 if (columnFamiliesBuilder_ == null) { 2092 if (value == null) { 2093 throw new NullPointerException(); 2094 } 2095 ensureColumnFamiliesIsMutable(); 2096 columnFamilies_.set(index, value); 2097 onChanged(); 2098 } else { 2099 columnFamiliesBuilder_.setMessage(index, value); 2100 } 2101 return this; 2102 } 2103 /** 2104 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2105 */ setColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue)2106 public Builder setColumnFamilies( 2107 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { 2108 if (columnFamiliesBuilder_ == null) { 2109 ensureColumnFamiliesIsMutable(); 2110 columnFamilies_.set(index, builderForValue.build()); 2111 onChanged(); 2112 } else { 2113 columnFamiliesBuilder_.setMessage(index, builderForValue.build()); 2114 } 2115 return this; 2116 } 2117 /** 2118 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2119 */ addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value)2120 public Builder addColumnFamilies(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { 2121 if (columnFamiliesBuilder_ == null) { 2122 if (value == null) { 2123 throw new NullPointerException(); 2124 } 2125 ensureColumnFamiliesIsMutable(); 2126 columnFamilies_.add(value); 2127 onChanged(); 2128 } else { 2129 columnFamiliesBuilder_.addMessage(value); 2130 } 2131 return this; 2132 } 2133 /** 2134 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2135 */ addColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value)2136 public Builder addColumnFamilies( 2137 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) { 2138 if (columnFamiliesBuilder_ == null) { 2139 if (value == null) { 2140 throw new NullPointerException(); 2141 } 2142 ensureColumnFamiliesIsMutable(); 2143 columnFamilies_.add(index, value); 2144 onChanged(); 2145 } else { 2146 columnFamiliesBuilder_.addMessage(index, value); 2147 } 2148 return this; 2149 } 2150 /** 2151 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2152 */ addColumnFamilies( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue)2153 public Builder addColumnFamilies( 2154 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { 2155 if (columnFamiliesBuilder_ == null) { 2156 ensureColumnFamiliesIsMutable(); 2157 columnFamilies_.add(builderForValue.build()); 2158 onChanged(); 2159 } else { 2160 columnFamiliesBuilder_.addMessage(builderForValue.build()); 2161 } 2162 return this; 2163 } 2164 /** 2165 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2166 */ addColumnFamilies( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue)2167 public Builder addColumnFamilies( 2168 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) { 2169 if (columnFamiliesBuilder_ == null) { 2170 ensureColumnFamiliesIsMutable(); 2171 columnFamilies_.add(index, builderForValue.build()); 2172 onChanged(); 2173 } else { 2174 columnFamiliesBuilder_.addMessage(index, builderForValue.build()); 2175 } 2176 return this; 2177 } 2178 /** 2179 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2180 */ addAllColumnFamilies( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> values)2181 public Builder addAllColumnFamilies( 2182 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema> values) { 2183 if (columnFamiliesBuilder_ == null) { 2184 ensureColumnFamiliesIsMutable(); 2185 super.addAll(values, columnFamilies_); 2186 onChanged(); 2187 } else { 2188 columnFamiliesBuilder_.addAllMessages(values); 2189 } 2190 return this; 2191 } 2192 /** 2193 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2194 */ clearColumnFamilies()2195 public Builder clearColumnFamilies() { 2196 if (columnFamiliesBuilder_ == null) { 2197 columnFamilies_ = java.util.Collections.emptyList(); 2198 bitField0_ = (bitField0_ & ~0x00000004); 2199 onChanged(); 2200 } else { 2201 columnFamiliesBuilder_.clear(); 2202 } 2203 return this; 2204 } 2205 /** 2206 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2207 */ removeColumnFamilies(int index)2208 public Builder removeColumnFamilies(int index) { 2209 if (columnFamiliesBuilder_ == null) { 2210 ensureColumnFamiliesIsMutable(); 2211 columnFamilies_.remove(index); 2212 onChanged(); 2213 } else { 2214 columnFamiliesBuilder_.remove(index); 2215 } 2216 return this; 2217 } 2218 /** 2219 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2220 */ getColumnFamiliesBuilder( int index)2221 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnFamiliesBuilder( 2222 int index) { 2223 return getColumnFamiliesFieldBuilder().getBuilder(index); 2224 } 2225 /** 2226 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2227 */ getColumnFamiliesOrBuilder( int index)2228 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder( 2229 int index) { 2230 if (columnFamiliesBuilder_ == null) { 2231 return columnFamilies_.get(index); } else { 2232 return columnFamiliesBuilder_.getMessageOrBuilder(index); 2233 } 2234 } 2235 /** 2236 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2237 */ 2238 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesOrBuilderList()2239 getColumnFamiliesOrBuilderList() { 2240 if (columnFamiliesBuilder_ != null) { 2241 return columnFamiliesBuilder_.getMessageOrBuilderList(); 2242 } else { 2243 return java.util.Collections.unmodifiableList(columnFamilies_); 2244 } 2245 } 2246 /** 2247 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2248 */ addColumnFamiliesBuilder()2249 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder() { 2250 return getColumnFamiliesFieldBuilder().addBuilder( 2251 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); 2252 } 2253 /** 2254 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2255 */ addColumnFamiliesBuilder( int index)2256 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder addColumnFamiliesBuilder( 2257 int index) { 2258 return getColumnFamiliesFieldBuilder().addBuilder( 2259 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()); 2260 } 2261 /** 2262 * <code>repeated .ColumnFamilySchema column_families = 3;</code> 2263 */ 2264 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder> getColumnFamiliesBuilderList()2265 getColumnFamiliesBuilderList() { 2266 return getColumnFamiliesFieldBuilder().getBuilderList(); 2267 } 2268 private com.google.protobuf.RepeatedFieldBuilder< 2269 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> getColumnFamiliesFieldBuilder()2270 getColumnFamiliesFieldBuilder() { 2271 if (columnFamiliesBuilder_ == null) { 2272 columnFamiliesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 2273 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>( 2274 columnFamilies_, 2275 ((bitField0_ & 0x00000004) == 0x00000004), 2276 getParentForChildren(), 2277 isClean()); 2278 columnFamilies_ = null; 2279 } 2280 return columnFamiliesBuilder_; 2281 } 2282 2283 // repeated .NameStringPair configuration = 4; 2284 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = 2285 java.util.Collections.emptyList(); ensureConfigurationIsMutable()2286 private void ensureConfigurationIsMutable() { 2287 if (!((bitField0_ & 0x00000008) == 0x00000008)) { 2288 configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); 2289 bitField0_ |= 0x00000008; 2290 } 2291 } 2292 2293 private com.google.protobuf.RepeatedFieldBuilder< 2294 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; 2295 2296 /** 2297 * <code>repeated .NameStringPair configuration = 4;</code> 2298 */ getConfigurationList()2299 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { 2300 if (configurationBuilder_ == null) { 2301 return java.util.Collections.unmodifiableList(configuration_); 2302 } else { 2303 return configurationBuilder_.getMessageList(); 2304 } 2305 } 2306 /** 2307 * <code>repeated .NameStringPair configuration = 4;</code> 2308 */ getConfigurationCount()2309 public int getConfigurationCount() { 2310 if (configurationBuilder_ == null) { 2311 return configuration_.size(); 2312 } else { 2313 return configurationBuilder_.getCount(); 2314 } 2315 } 2316 /** 2317 * <code>repeated .NameStringPair configuration = 4;</code> 2318 */ getConfiguration(int index)2319 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { 2320 if (configurationBuilder_ == null) { 2321 return configuration_.get(index); 2322 } else { 2323 return configurationBuilder_.getMessage(index); 2324 } 2325 } 2326 /** 2327 * <code>repeated .NameStringPair configuration = 4;</code> 2328 */ setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value)2329 public Builder setConfiguration( 2330 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { 2331 if (configurationBuilder_ == null) { 2332 if (value == null) { 2333 throw new NullPointerException(); 2334 } 2335 ensureConfigurationIsMutable(); 2336 configuration_.set(index, value); 2337 onChanged(); 2338 } else { 2339 configurationBuilder_.setMessage(index, value); 2340 } 2341 return this; 2342 } 2343 /** 2344 * <code>repeated .NameStringPair configuration = 4;</code> 2345 */ setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue)2346 public Builder setConfiguration( 2347 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { 2348 if (configurationBuilder_ == null) { 2349 ensureConfigurationIsMutable(); 2350 configuration_.set(index, builderForValue.build()); 2351 onChanged(); 2352 } else { 2353 configurationBuilder_.setMessage(index, builderForValue.build()); 2354 } 2355 return this; 2356 } 2357 /** 2358 * <code>repeated .NameStringPair configuration = 4;</code> 2359 */ addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value)2360 public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { 2361 if (configurationBuilder_ == null) { 2362 if (value == null) { 2363 throw new NullPointerException(); 2364 } 2365 ensureConfigurationIsMutable(); 2366 configuration_.add(value); 2367 onChanged(); 2368 } else { 2369 configurationBuilder_.addMessage(value); 2370 } 2371 return this; 2372 } 2373 /** 2374 * <code>repeated .NameStringPair configuration = 4;</code> 2375 */ addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value)2376 public Builder addConfiguration( 2377 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { 2378 if (configurationBuilder_ == null) { 2379 if (value == null) { 2380 throw new NullPointerException(); 2381 } 2382 ensureConfigurationIsMutable(); 2383 configuration_.add(index, value); 2384 onChanged(); 2385 } else { 2386 configurationBuilder_.addMessage(index, value); 2387 } 2388 return this; 2389 } 2390 /** 2391 * <code>repeated .NameStringPair configuration = 4;</code> 2392 */ addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue)2393 public Builder addConfiguration( 2394 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { 2395 if (configurationBuilder_ == null) { 2396 ensureConfigurationIsMutable(); 2397 configuration_.add(builderForValue.build()); 2398 onChanged(); 2399 } else { 2400 configurationBuilder_.addMessage(builderForValue.build()); 2401 } 2402 return this; 2403 } 2404 /** 2405 * <code>repeated .NameStringPair configuration = 4;</code> 2406 */ addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue)2407 public Builder addConfiguration( 2408 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { 2409 if (configurationBuilder_ == null) { 2410 ensureConfigurationIsMutable(); 2411 configuration_.add(index, builderForValue.build()); 2412 onChanged(); 2413 } else { 2414 configurationBuilder_.addMessage(index, builderForValue.build()); 2415 } 2416 return this; 2417 } 2418 /** 2419 * <code>repeated .NameStringPair configuration = 4;</code> 2420 */ addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values)2421 public Builder addAllConfiguration( 2422 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { 2423 if (configurationBuilder_ == null) { 2424 ensureConfigurationIsMutable(); 2425 super.addAll(values, configuration_); 2426 onChanged(); 2427 } else { 2428 configurationBuilder_.addAllMessages(values); 2429 } 2430 return this; 2431 } 2432 /** 2433 * <code>repeated .NameStringPair configuration = 4;</code> 2434 */ clearConfiguration()2435 public Builder clearConfiguration() { 2436 if (configurationBuilder_ == null) { 2437 configuration_ = java.util.Collections.emptyList(); 2438 bitField0_ = (bitField0_ & ~0x00000008); 2439 onChanged(); 2440 } else { 2441 configurationBuilder_.clear(); 2442 } 2443 return this; 2444 } 2445 /** 2446 * <code>repeated .NameStringPair configuration = 4;</code> 2447 */ removeConfiguration(int index)2448 public Builder removeConfiguration(int index) { 2449 if (configurationBuilder_ == null) { 2450 ensureConfigurationIsMutable(); 2451 configuration_.remove(index); 2452 onChanged(); 2453 } else { 2454 configurationBuilder_.remove(index); 2455 } 2456 return this; 2457 } 2458 /** 2459 * <code>repeated .NameStringPair configuration = 4;</code> 2460 */ getConfigurationBuilder( int index)2461 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( 2462 int index) { 2463 return getConfigurationFieldBuilder().getBuilder(index); 2464 } 2465 /** 2466 * <code>repeated .NameStringPair configuration = 4;</code> 2467 */ getConfigurationOrBuilder( int index)2468 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 2469 int index) { 2470 if (configurationBuilder_ == null) { 2471 return configuration_.get(index); } else { 2472 return configurationBuilder_.getMessageOrBuilder(index); 2473 } 2474 } 2475 /** 2476 * <code>repeated .NameStringPair configuration = 4;</code> 2477 */ 2478 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()2479 getConfigurationOrBuilderList() { 2480 if (configurationBuilder_ != null) { 2481 return configurationBuilder_.getMessageOrBuilderList(); 2482 } else { 2483 return java.util.Collections.unmodifiableList(configuration_); 2484 } 2485 } 2486 /** 2487 * <code>repeated .NameStringPair configuration = 4;</code> 2488 */ addConfigurationBuilder()2489 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { 2490 return getConfigurationFieldBuilder().addBuilder( 2491 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); 2492 } 2493 /** 2494 * <code>repeated .NameStringPair configuration = 4;</code> 2495 */ addConfigurationBuilder( int index)2496 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( 2497 int index) { 2498 return getConfigurationFieldBuilder().addBuilder( 2499 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); 2500 } 2501 /** 2502 * <code>repeated .NameStringPair configuration = 4;</code> 2503 */ 2504 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList()2505 getConfigurationBuilderList() { 2506 return getConfigurationFieldBuilder().getBuilderList(); 2507 } 2508 private com.google.protobuf.RepeatedFieldBuilder< 2509 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder()2510 getConfigurationFieldBuilder() { 2511 if (configurationBuilder_ == null) { 2512 configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 2513 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( 2514 configuration_, 2515 ((bitField0_ & 0x00000008) == 0x00000008), 2516 getParentForChildren(), 2517 isClean()); 2518 configuration_ = null; 2519 } 2520 return configurationBuilder_; 2521 } 2522 2523 // @@protoc_insertion_point(builder_scope:TableSchema) 2524 } 2525 2526 static { 2527 defaultInstance = new TableSchema(true); defaultInstance.initFields()2528 defaultInstance.initFields(); 2529 } 2530 2531 // @@protoc_insertion_point(class_scope:TableSchema) 2532 } 2533 2534 public interface ColumnFamilySchemaOrBuilder 2535 extends com.google.protobuf.MessageOrBuilder { 2536 2537 // required bytes name = 1; 2538 /** 2539 * <code>required bytes name = 1;</code> 2540 */ hasName()2541 boolean hasName(); 2542 /** 2543 * <code>required bytes name = 1;</code> 2544 */ getName()2545 com.google.protobuf.ByteString getName(); 2546 2547 // repeated .BytesBytesPair attributes = 2; 2548 /** 2549 * <code>repeated .BytesBytesPair attributes = 2;</code> 2550 */ 2551 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList()2552 getAttributesList(); 2553 /** 2554 * <code>repeated .BytesBytesPair attributes = 2;</code> 2555 */ getAttributes(int index)2556 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index); 2557 /** 2558 * <code>repeated .BytesBytesPair attributes = 2;</code> 2559 */ getAttributesCount()2560 int getAttributesCount(); 2561 /** 2562 * <code>repeated .BytesBytesPair attributes = 2;</code> 2563 */ 2564 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList()2565 getAttributesOrBuilderList(); 2566 /** 2567 * <code>repeated .BytesBytesPair attributes = 2;</code> 2568 */ getAttributesOrBuilder( int index)2569 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( 2570 int index); 2571 2572 // repeated .NameStringPair configuration = 3; 2573 /** 2574 * <code>repeated .NameStringPair configuration = 3;</code> 2575 */ 2576 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList()2577 getConfigurationList(); 2578 /** 2579 * <code>repeated .NameStringPair configuration = 3;</code> 2580 */ getConfiguration(int index)2581 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); 2582 /** 2583 * <code>repeated .NameStringPair configuration = 3;</code> 2584 */ getConfigurationCount()2585 int getConfigurationCount(); 2586 /** 2587 * <code>repeated .NameStringPair configuration = 3;</code> 2588 */ 2589 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()2590 getConfigurationOrBuilderList(); 2591 /** 2592 * <code>repeated .NameStringPair configuration = 3;</code> 2593 */ getConfigurationOrBuilder( int index)2594 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 2595 int index); 2596 } 2597 /** 2598 * Protobuf type {@code ColumnFamilySchema} 2599 * 2600 * <pre> 2601 ** 2602 * Column Family Schema 2603 * Inspired by the rest ColumSchemaMessage 2604 * </pre> 2605 */ 2606 public static final class ColumnFamilySchema extends 2607 com.google.protobuf.GeneratedMessage 2608 implements ColumnFamilySchemaOrBuilder { 2609 // Use ColumnFamilySchema.newBuilder() to construct. ColumnFamilySchema(com.google.protobuf.GeneratedMessage.Builder<?> builder)2610 private ColumnFamilySchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 2611 super(builder); 2612 this.unknownFields = builder.getUnknownFields(); 2613 } ColumnFamilySchema(boolean noInit)2614 private ColumnFamilySchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 2615 2616 private static final ColumnFamilySchema defaultInstance; getDefaultInstance()2617 public static ColumnFamilySchema getDefaultInstance() { 2618 return defaultInstance; 2619 } 2620 getDefaultInstanceForType()2621 public ColumnFamilySchema getDefaultInstanceForType() { 2622 return defaultInstance; 2623 } 2624 2625 private final com.google.protobuf.UnknownFieldSet unknownFields; 2626 @java.lang.Override 2627 public final com.google.protobuf.UnknownFieldSet getUnknownFields()2628 getUnknownFields() { 2629 return this.unknownFields; 2630 } ColumnFamilySchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2631 private ColumnFamilySchema( 2632 com.google.protobuf.CodedInputStream input, 2633 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2634 throws com.google.protobuf.InvalidProtocolBufferException { 2635 initFields(); 2636 int mutable_bitField0_ = 0; 2637 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 2638 com.google.protobuf.UnknownFieldSet.newBuilder(); 2639 try { 2640 boolean done = false; 2641 while (!done) { 2642 int tag = input.readTag(); 2643 switch (tag) { 2644 case 0: 2645 done = true; 2646 break; 2647 default: { 2648 if (!parseUnknownField(input, unknownFields, 2649 extensionRegistry, tag)) { 2650 done = true; 2651 } 2652 break; 2653 } 2654 case 10: { 2655 bitField0_ |= 0x00000001; 2656 name_ = input.readBytes(); 2657 break; 2658 } 2659 case 18: { 2660 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 2661 attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(); 2662 mutable_bitField0_ |= 0x00000002; 2663 } 2664 attributes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); 2665 break; 2666 } 2667 case 26: { 2668 if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 2669 configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); 2670 mutable_bitField0_ |= 0x00000004; 2671 } 2672 configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); 2673 break; 2674 } 2675 } 2676 } 2677 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2678 throw e.setUnfinishedMessage(this); 2679 } catch (java.io.IOException e) { 2680 throw new com.google.protobuf.InvalidProtocolBufferException( 2681 e.getMessage()).setUnfinishedMessage(this); 2682 } finally { 2683 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 2684 attributes_ = java.util.Collections.unmodifiableList(attributes_); 2685 } 2686 if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 2687 configuration_ = java.util.Collections.unmodifiableList(configuration_); 2688 } 2689 this.unknownFields = unknownFields.build(); 2690 makeExtensionsImmutable(); 2691 } 2692 } 2693 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()2694 getDescriptor() { 2695 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; 2696 } 2697 2698 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()2699 internalGetFieldAccessorTable() { 2700 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable 2701 .ensureFieldAccessorsInitialized( 2702 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); 2703 } 2704 2705 public static com.google.protobuf.Parser<ColumnFamilySchema> PARSER = 2706 new com.google.protobuf.AbstractParser<ColumnFamilySchema>() { 2707 public ColumnFamilySchema parsePartialFrom( 2708 com.google.protobuf.CodedInputStream input, 2709 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2710 throws com.google.protobuf.InvalidProtocolBufferException { 2711 return new ColumnFamilySchema(input, extensionRegistry); 2712 } 2713 }; 2714 2715 @java.lang.Override getParserForType()2716 public com.google.protobuf.Parser<ColumnFamilySchema> getParserForType() { 2717 return PARSER; 2718 } 2719 2720 private int bitField0_; 2721 // required bytes name = 1; 2722 public static final int NAME_FIELD_NUMBER = 1; 2723 private com.google.protobuf.ByteString name_; 2724 /** 2725 * <code>required bytes name = 1;</code> 2726 */ hasName()2727 public boolean hasName() { 2728 return ((bitField0_ & 0x00000001) == 0x00000001); 2729 } 2730 /** 2731 * <code>required bytes name = 1;</code> 2732 */ getName()2733 public com.google.protobuf.ByteString getName() { 2734 return name_; 2735 } 2736 2737 // repeated .BytesBytesPair attributes = 2; 2738 public static final int ATTRIBUTES_FIELD_NUMBER = 2; 2739 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_; 2740 /** 2741 * <code>repeated .BytesBytesPair attributes = 2;</code> 2742 */ getAttributesList()2743 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { 2744 return attributes_; 2745 } 2746 /** 2747 * <code>repeated .BytesBytesPair attributes = 2;</code> 2748 */ 2749 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList()2750 getAttributesOrBuilderList() { 2751 return attributes_; 2752 } 2753 /** 2754 * <code>repeated .BytesBytesPair attributes = 2;</code> 2755 */ getAttributesCount()2756 public int getAttributesCount() { 2757 return attributes_.size(); 2758 } 2759 /** 2760 * <code>repeated .BytesBytesPair attributes = 2;</code> 2761 */ getAttributes(int index)2762 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { 2763 return attributes_.get(index); 2764 } 2765 /** 2766 * <code>repeated .BytesBytesPair attributes = 2;</code> 2767 */ getAttributesOrBuilder( int index)2768 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( 2769 int index) { 2770 return attributes_.get(index); 2771 } 2772 2773 // repeated .NameStringPair configuration = 3; 2774 public static final int CONFIGURATION_FIELD_NUMBER = 3; 2775 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; 2776 /** 2777 * <code>repeated .NameStringPair configuration = 3;</code> 2778 */ getConfigurationList()2779 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { 2780 return configuration_; 2781 } 2782 /** 2783 * <code>repeated .NameStringPair configuration = 3;</code> 2784 */ 2785 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()2786 getConfigurationOrBuilderList() { 2787 return configuration_; 2788 } 2789 /** 2790 * <code>repeated .NameStringPair configuration = 3;</code> 2791 */ getConfigurationCount()2792 public int getConfigurationCount() { 2793 return configuration_.size(); 2794 } 2795 /** 2796 * <code>repeated .NameStringPair configuration = 3;</code> 2797 */ getConfiguration(int index)2798 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { 2799 return configuration_.get(index); 2800 } 2801 /** 2802 * <code>repeated .NameStringPair configuration = 3;</code> 2803 */ getConfigurationOrBuilder( int index)2804 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 2805 int index) { 2806 return configuration_.get(index); 2807 } 2808 initFields()2809 private void initFields() { 2810 name_ = com.google.protobuf.ByteString.EMPTY; 2811 attributes_ = java.util.Collections.emptyList(); 2812 configuration_ = java.util.Collections.emptyList(); 2813 } 2814 private byte memoizedIsInitialized = -1; isInitialized()2815 public final boolean isInitialized() { 2816 byte isInitialized = memoizedIsInitialized; 2817 if (isInitialized != -1) return isInitialized == 1; 2818 2819 if (!hasName()) { 2820 memoizedIsInitialized = 0; 2821 return false; 2822 } 2823 for (int i = 0; i < getAttributesCount(); i++) { 2824 if (!getAttributes(i).isInitialized()) { 2825 memoizedIsInitialized = 0; 2826 return false; 2827 } 2828 } 2829 for (int i = 0; i < getConfigurationCount(); i++) { 2830 if (!getConfiguration(i).isInitialized()) { 2831 memoizedIsInitialized = 0; 2832 return false; 2833 } 2834 } 2835 memoizedIsInitialized = 1; 2836 return true; 2837 } 2838 writeTo(com.google.protobuf.CodedOutputStream output)2839 public void writeTo(com.google.protobuf.CodedOutputStream output) 2840 throws java.io.IOException { 2841 getSerializedSize(); 2842 if (((bitField0_ & 0x00000001) == 0x00000001)) { 2843 output.writeBytes(1, name_); 2844 } 2845 for (int i = 0; i < attributes_.size(); i++) { 2846 output.writeMessage(2, attributes_.get(i)); 2847 } 2848 for (int i = 0; i < configuration_.size(); i++) { 2849 output.writeMessage(3, configuration_.get(i)); 2850 } 2851 getUnknownFields().writeTo(output); 2852 } 2853 2854 private int memoizedSerializedSize = -1; getSerializedSize()2855 public int getSerializedSize() { 2856 int size = memoizedSerializedSize; 2857 if (size != -1) return size; 2858 2859 size = 0; 2860 if (((bitField0_ & 0x00000001) == 0x00000001)) { 2861 size += com.google.protobuf.CodedOutputStream 2862 .computeBytesSize(1, name_); 2863 } 2864 for (int i = 0; i < attributes_.size(); i++) { 2865 size += com.google.protobuf.CodedOutputStream 2866 .computeMessageSize(2, attributes_.get(i)); 2867 } 2868 for (int i = 0; i < configuration_.size(); i++) { 2869 size += com.google.protobuf.CodedOutputStream 2870 .computeMessageSize(3, configuration_.get(i)); 2871 } 2872 size += getUnknownFields().getSerializedSize(); 2873 memoizedSerializedSize = size; 2874 return size; 2875 } 2876 2877 private static final long serialVersionUID = 0L; 2878 @java.lang.Override writeReplace()2879 protected java.lang.Object writeReplace() 2880 throws java.io.ObjectStreamException { 2881 return super.writeReplace(); 2882 } 2883 2884 @java.lang.Override equals(final java.lang.Object obj)2885 public boolean equals(final java.lang.Object obj) { 2886 if (obj == this) { 2887 return true; 2888 } 2889 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)) { 2890 return super.equals(obj); 2891 } 2892 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) obj; 2893 2894 boolean result = true; 2895 result = result && (hasName() == other.hasName()); 2896 if (hasName()) { 2897 result = result && getName() 2898 .equals(other.getName()); 2899 } 2900 result = result && getAttributesList() 2901 .equals(other.getAttributesList()); 2902 result = result && getConfigurationList() 2903 .equals(other.getConfigurationList()); 2904 result = result && 2905 getUnknownFields().equals(other.getUnknownFields()); 2906 return result; 2907 } 2908 2909 private int memoizedHashCode = 0; 2910 @java.lang.Override hashCode()2911 public int hashCode() { 2912 if (memoizedHashCode != 0) { 2913 return memoizedHashCode; 2914 } 2915 int hash = 41; 2916 hash = (19 * hash) + getDescriptorForType().hashCode(); 2917 if (hasName()) { 2918 hash = (37 * hash) + NAME_FIELD_NUMBER; 2919 hash = (53 * hash) + getName().hashCode(); 2920 } 2921 if (getAttributesCount() > 0) { 2922 hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER; 2923 hash = (53 * hash) + getAttributesList().hashCode(); 2924 } 2925 if (getConfigurationCount() > 0) { 2926 hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; 2927 hash = (53 * hash) + getConfigurationList().hashCode(); 2928 } 2929 hash = (29 * hash) + getUnknownFields().hashCode(); 2930 memoizedHashCode = hash; 2931 return hash; 2932 } 2933 parseFrom( com.google.protobuf.ByteString data)2934 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( 2935 com.google.protobuf.ByteString data) 2936 throws com.google.protobuf.InvalidProtocolBufferException { 2937 return PARSER.parseFrom(data); 2938 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2939 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( 2940 com.google.protobuf.ByteString data, 2941 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2942 throws com.google.protobuf.InvalidProtocolBufferException { 2943 return PARSER.parseFrom(data, extensionRegistry); 2944 } parseFrom(byte[] data)2945 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(byte[] data) 2946 throws com.google.protobuf.InvalidProtocolBufferException { 2947 return PARSER.parseFrom(data); 2948 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2949 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( 2950 byte[] data, 2951 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2952 throws com.google.protobuf.InvalidProtocolBufferException { 2953 return PARSER.parseFrom(data, extensionRegistry); 2954 } parseFrom(java.io.InputStream input)2955 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom(java.io.InputStream input) 2956 throws java.io.IOException { 2957 return PARSER.parseFrom(input); 2958 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2959 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( 2960 java.io.InputStream input, 2961 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2962 throws java.io.IOException { 2963 return PARSER.parseFrom(input, extensionRegistry); 2964 } parseDelimitedFrom(java.io.InputStream input)2965 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom(java.io.InputStream input) 2966 throws java.io.IOException { 2967 return PARSER.parseDelimitedFrom(input); 2968 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2969 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseDelimitedFrom( 2970 java.io.InputStream input, 2971 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2972 throws java.io.IOException { 2973 return PARSER.parseDelimitedFrom(input, extensionRegistry); 2974 } parseFrom( com.google.protobuf.CodedInputStream input)2975 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( 2976 com.google.protobuf.CodedInputStream input) 2977 throws java.io.IOException { 2978 return PARSER.parseFrom(input); 2979 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2980 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parseFrom( 2981 com.google.protobuf.CodedInputStream input, 2982 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2983 throws java.io.IOException { 2984 return PARSER.parseFrom(input, extensionRegistry); 2985 } 2986 newBuilder()2987 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()2988 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype)2989 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema prototype) { 2990 return newBuilder().mergeFrom(prototype); 2991 } toBuilder()2992 public Builder toBuilder() { return newBuilder(this); } 2993 2994 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)2995 protected Builder newBuilderForType( 2996 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 2997 Builder builder = new Builder(parent); 2998 return builder; 2999 } 3000 /** 3001 * Protobuf type {@code ColumnFamilySchema} 3002 * 3003 * <pre> 3004 ** 3005 * Column Family Schema 3006 * Inspired by the rest ColumSchemaMessage 3007 * </pre> 3008 */ 3009 public static final class Builder extends 3010 com.google.protobuf.GeneratedMessage.Builder<Builder> 3011 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder { 3012 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3013 getDescriptor() { 3014 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; 3015 } 3016 3017 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3018 internalGetFieldAccessorTable() { 3019 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_fieldAccessorTable 3020 .ensureFieldAccessorsInitialized( 3021 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class); 3022 } 3023 3024 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder() Builder()3025 private Builder() { 3026 maybeForceBuilderInitialization(); 3027 } 3028 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)3029 private Builder( 3030 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3031 super(parent); 3032 maybeForceBuilderInitialization(); 3033 } maybeForceBuilderInitialization()3034 private void maybeForceBuilderInitialization() { 3035 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 3036 getAttributesFieldBuilder(); 3037 getConfigurationFieldBuilder(); 3038 } 3039 } create()3040 private static Builder create() { 3041 return new Builder(); 3042 } 3043 clear()3044 public Builder clear() { 3045 super.clear(); 3046 name_ = com.google.protobuf.ByteString.EMPTY; 3047 bitField0_ = (bitField0_ & ~0x00000001); 3048 if (attributesBuilder_ == null) { 3049 attributes_ = java.util.Collections.emptyList(); 3050 bitField0_ = (bitField0_ & ~0x00000002); 3051 } else { 3052 attributesBuilder_.clear(); 3053 } 3054 if (configurationBuilder_ == null) { 3055 configuration_ = java.util.Collections.emptyList(); 3056 bitField0_ = (bitField0_ & ~0x00000004); 3057 } else { 3058 configurationBuilder_.clear(); 3059 } 3060 return this; 3061 } 3062 clone()3063 public Builder clone() { 3064 return create().mergeFrom(buildPartial()); 3065 } 3066 3067 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()3068 getDescriptorForType() { 3069 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilySchema_descriptor; 3070 } 3071 getDefaultInstanceForType()3072 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getDefaultInstanceForType() { 3073 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance(); 3074 } 3075 build()3076 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema build() { 3077 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = buildPartial(); 3078 if (!result.isInitialized()) { 3079 throw newUninitializedMessageException(result); 3080 } 3081 return result; 3082 } 3083 buildPartial()3084 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema buildPartial() { 3085 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema(this); 3086 int from_bitField0_ = bitField0_; 3087 int to_bitField0_ = 0; 3088 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 3089 to_bitField0_ |= 0x00000001; 3090 } 3091 result.name_ = name_; 3092 if (attributesBuilder_ == null) { 3093 if (((bitField0_ & 0x00000002) == 0x00000002)) { 3094 attributes_ = java.util.Collections.unmodifiableList(attributes_); 3095 bitField0_ = (bitField0_ & ~0x00000002); 3096 } 3097 result.attributes_ = attributes_; 3098 } else { 3099 result.attributes_ = attributesBuilder_.build(); 3100 } 3101 if (configurationBuilder_ == null) { 3102 if (((bitField0_ & 0x00000004) == 0x00000004)) { 3103 configuration_ = java.util.Collections.unmodifiableList(configuration_); 3104 bitField0_ = (bitField0_ & ~0x00000004); 3105 } 3106 result.configuration_ = configuration_; 3107 } else { 3108 result.configuration_ = configurationBuilder_.build(); 3109 } 3110 result.bitField0_ = to_bitField0_; 3111 onBuilt(); 3112 return result; 3113 } 3114 mergeFrom(com.google.protobuf.Message other)3115 public Builder mergeFrom(com.google.protobuf.Message other) { 3116 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) { 3117 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema)other); 3118 } else { 3119 super.mergeFrom(other); 3120 return this; 3121 } 3122 } 3123 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other)3124 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema other) { 3125 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) return this; 3126 if (other.hasName()) { 3127 setName(other.getName()); 3128 } 3129 if (attributesBuilder_ == null) { 3130 if (!other.attributes_.isEmpty()) { 3131 if (attributes_.isEmpty()) { 3132 attributes_ = other.attributes_; 3133 bitField0_ = (bitField0_ & ~0x00000002); 3134 } else { 3135 ensureAttributesIsMutable(); 3136 attributes_.addAll(other.attributes_); 3137 } 3138 onChanged(); 3139 } 3140 } else { 3141 if (!other.attributes_.isEmpty()) { 3142 if (attributesBuilder_.isEmpty()) { 3143 attributesBuilder_.dispose(); 3144 attributesBuilder_ = null; 3145 attributes_ = other.attributes_; 3146 bitField0_ = (bitField0_ & ~0x00000002); 3147 attributesBuilder_ = 3148 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 3149 getAttributesFieldBuilder() : null; 3150 } else { 3151 attributesBuilder_.addAllMessages(other.attributes_); 3152 } 3153 } 3154 } 3155 if (configurationBuilder_ == null) { 3156 if (!other.configuration_.isEmpty()) { 3157 if (configuration_.isEmpty()) { 3158 configuration_ = other.configuration_; 3159 bitField0_ = (bitField0_ & ~0x00000004); 3160 } else { 3161 ensureConfigurationIsMutable(); 3162 configuration_.addAll(other.configuration_); 3163 } 3164 onChanged(); 3165 } 3166 } else { 3167 if (!other.configuration_.isEmpty()) { 3168 if (configurationBuilder_.isEmpty()) { 3169 configurationBuilder_.dispose(); 3170 configurationBuilder_ = null; 3171 configuration_ = other.configuration_; 3172 bitField0_ = (bitField0_ & ~0x00000004); 3173 configurationBuilder_ = 3174 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 3175 getConfigurationFieldBuilder() : null; 3176 } else { 3177 configurationBuilder_.addAllMessages(other.configuration_); 3178 } 3179 } 3180 } 3181 this.mergeUnknownFields(other.getUnknownFields()); 3182 return this; 3183 } 3184 isInitialized()3185 public final boolean isInitialized() { 3186 if (!hasName()) { 3187 3188 return false; 3189 } 3190 for (int i = 0; i < getAttributesCount(); i++) { 3191 if (!getAttributes(i).isInitialized()) { 3192 3193 return false; 3194 } 3195 } 3196 for (int i = 0; i < getConfigurationCount(); i++) { 3197 if (!getConfiguration(i).isInitialized()) { 3198 3199 return false; 3200 } 3201 } 3202 return true; 3203 } 3204 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3205 public Builder mergeFrom( 3206 com.google.protobuf.CodedInputStream input, 3207 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3208 throws java.io.IOException { 3209 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema parsedMessage = null; 3210 try { 3211 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 3212 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3213 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema) e.getUnfinishedMessage(); 3214 throw e; 3215 } finally { 3216 if (parsedMessage != null) { 3217 mergeFrom(parsedMessage); 3218 } 3219 } 3220 return this; 3221 } 3222 private int bitField0_; 3223 3224 // required bytes name = 1; 3225 private com.google.protobuf.ByteString name_ = com.google.protobuf.ByteString.EMPTY; 3226 /** 3227 * <code>required bytes name = 1;</code> 3228 */ hasName()3229 public boolean hasName() { 3230 return ((bitField0_ & 0x00000001) == 0x00000001); 3231 } 3232 /** 3233 * <code>required bytes name = 1;</code> 3234 */ getName()3235 public com.google.protobuf.ByteString getName() { 3236 return name_; 3237 } 3238 /** 3239 * <code>required bytes name = 1;</code> 3240 */ setName(com.google.protobuf.ByteString value)3241 public Builder setName(com.google.protobuf.ByteString value) { 3242 if (value == null) { 3243 throw new NullPointerException(); 3244 } 3245 bitField0_ |= 0x00000001; 3246 name_ = value; 3247 onChanged(); 3248 return this; 3249 } 3250 /** 3251 * <code>required bytes name = 1;</code> 3252 */ clearName()3253 public Builder clearName() { 3254 bitField0_ = (bitField0_ & ~0x00000001); 3255 name_ = getDefaultInstance().getName(); 3256 onChanged(); 3257 return this; 3258 } 3259 3260 // repeated .BytesBytesPair attributes = 2; 3261 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ = 3262 java.util.Collections.emptyList(); ensureAttributesIsMutable()3263 private void ensureAttributesIsMutable() { 3264 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 3265 attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(attributes_); 3266 bitField0_ |= 0x00000002; 3267 } 3268 } 3269 3270 private com.google.protobuf.RepeatedFieldBuilder< 3271 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_; 3272 3273 /** 3274 * <code>repeated .BytesBytesPair attributes = 2;</code> 3275 */ getAttributesList()3276 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getAttributesList() { 3277 if (attributesBuilder_ == null) { 3278 return java.util.Collections.unmodifiableList(attributes_); 3279 } else { 3280 return attributesBuilder_.getMessageList(); 3281 } 3282 } 3283 /** 3284 * <code>repeated .BytesBytesPair attributes = 2;</code> 3285 */ getAttributesCount()3286 public int getAttributesCount() { 3287 if (attributesBuilder_ == null) { 3288 return attributes_.size(); 3289 } else { 3290 return attributesBuilder_.getCount(); 3291 } 3292 } 3293 /** 3294 * <code>repeated .BytesBytesPair attributes = 2;</code> 3295 */ getAttributes(int index)3296 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getAttributes(int index) { 3297 if (attributesBuilder_ == null) { 3298 return attributes_.get(index); 3299 } else { 3300 return attributesBuilder_.getMessage(index); 3301 } 3302 } 3303 /** 3304 * <code>repeated .BytesBytesPair attributes = 2;</code> 3305 */ setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)3306 public Builder setAttributes( 3307 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 3308 if (attributesBuilder_ == null) { 3309 if (value == null) { 3310 throw new NullPointerException(); 3311 } 3312 ensureAttributesIsMutable(); 3313 attributes_.set(index, value); 3314 onChanged(); 3315 } else { 3316 attributesBuilder_.setMessage(index, value); 3317 } 3318 return this; 3319 } 3320 /** 3321 * <code>repeated .BytesBytesPair attributes = 2;</code> 3322 */ setAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)3323 public Builder setAttributes( 3324 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 3325 if (attributesBuilder_ == null) { 3326 ensureAttributesIsMutable(); 3327 attributes_.set(index, builderForValue.build()); 3328 onChanged(); 3329 } else { 3330 attributesBuilder_.setMessage(index, builderForValue.build()); 3331 } 3332 return this; 3333 } 3334 /** 3335 * <code>repeated .BytesBytesPair attributes = 2;</code> 3336 */ addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)3337 public Builder addAttributes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 3338 if (attributesBuilder_ == null) { 3339 if (value == null) { 3340 throw new NullPointerException(); 3341 } 3342 ensureAttributesIsMutable(); 3343 attributes_.add(value); 3344 onChanged(); 3345 } else { 3346 attributesBuilder_.addMessage(value); 3347 } 3348 return this; 3349 } 3350 /** 3351 * <code>repeated .BytesBytesPair attributes = 2;</code> 3352 */ addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)3353 public Builder addAttributes( 3354 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 3355 if (attributesBuilder_ == null) { 3356 if (value == null) { 3357 throw new NullPointerException(); 3358 } 3359 ensureAttributesIsMutable(); 3360 attributes_.add(index, value); 3361 onChanged(); 3362 } else { 3363 attributesBuilder_.addMessage(index, value); 3364 } 3365 return this; 3366 } 3367 /** 3368 * <code>repeated .BytesBytesPair attributes = 2;</code> 3369 */ addAttributes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)3370 public Builder addAttributes( 3371 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 3372 if (attributesBuilder_ == null) { 3373 ensureAttributesIsMutable(); 3374 attributes_.add(builderForValue.build()); 3375 onChanged(); 3376 } else { 3377 attributesBuilder_.addMessage(builderForValue.build()); 3378 } 3379 return this; 3380 } 3381 /** 3382 * <code>repeated .BytesBytesPair attributes = 2;</code> 3383 */ addAttributes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)3384 public Builder addAttributes( 3385 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 3386 if (attributesBuilder_ == null) { 3387 ensureAttributesIsMutable(); 3388 attributes_.add(index, builderForValue.build()); 3389 onChanged(); 3390 } else { 3391 attributesBuilder_.addMessage(index, builderForValue.build()); 3392 } 3393 return this; 3394 } 3395 /** 3396 * <code>repeated .BytesBytesPair attributes = 2;</code> 3397 */ addAllAttributes( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values)3398 public Builder addAllAttributes( 3399 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) { 3400 if (attributesBuilder_ == null) { 3401 ensureAttributesIsMutable(); 3402 super.addAll(values, attributes_); 3403 onChanged(); 3404 } else { 3405 attributesBuilder_.addAllMessages(values); 3406 } 3407 return this; 3408 } 3409 /** 3410 * <code>repeated .BytesBytesPair attributes = 2;</code> 3411 */ clearAttributes()3412 public Builder clearAttributes() { 3413 if (attributesBuilder_ == null) { 3414 attributes_ = java.util.Collections.emptyList(); 3415 bitField0_ = (bitField0_ & ~0x00000002); 3416 onChanged(); 3417 } else { 3418 attributesBuilder_.clear(); 3419 } 3420 return this; 3421 } 3422 /** 3423 * <code>repeated .BytesBytesPair attributes = 2;</code> 3424 */ removeAttributes(int index)3425 public Builder removeAttributes(int index) { 3426 if (attributesBuilder_ == null) { 3427 ensureAttributesIsMutable(); 3428 attributes_.remove(index); 3429 onChanged(); 3430 } else { 3431 attributesBuilder_.remove(index); 3432 } 3433 return this; 3434 } 3435 /** 3436 * <code>repeated .BytesBytesPair attributes = 2;</code> 3437 */ getAttributesBuilder( int index)3438 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getAttributesBuilder( 3439 int index) { 3440 return getAttributesFieldBuilder().getBuilder(index); 3441 } 3442 /** 3443 * <code>repeated .BytesBytesPair attributes = 2;</code> 3444 */ getAttributesOrBuilder( int index)3445 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder( 3446 int index) { 3447 if (attributesBuilder_ == null) { 3448 return attributes_.get(index); } else { 3449 return attributesBuilder_.getMessageOrBuilder(index); 3450 } 3451 } 3452 /** 3453 * <code>repeated .BytesBytesPair attributes = 2;</code> 3454 */ 3455 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesOrBuilderList()3456 getAttributesOrBuilderList() { 3457 if (attributesBuilder_ != null) { 3458 return attributesBuilder_.getMessageOrBuilderList(); 3459 } else { 3460 return java.util.Collections.unmodifiableList(attributes_); 3461 } 3462 } 3463 /** 3464 * <code>repeated .BytesBytesPair attributes = 2;</code> 3465 */ addAttributesBuilder()3466 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder() { 3467 return getAttributesFieldBuilder().addBuilder( 3468 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); 3469 } 3470 /** 3471 * <code>repeated .BytesBytesPair attributes = 2;</code> 3472 */ addAttributesBuilder( int index)3473 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addAttributesBuilder( 3474 int index) { 3475 return getAttributesFieldBuilder().addBuilder( 3476 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); 3477 } 3478 /** 3479 * <code>repeated .BytesBytesPair attributes = 2;</code> 3480 */ 3481 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getAttributesBuilderList()3482 getAttributesBuilderList() { 3483 return getAttributesFieldBuilder().getBuilderList(); 3484 } 3485 private com.google.protobuf.RepeatedFieldBuilder< 3486 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getAttributesFieldBuilder()3487 getAttributesFieldBuilder() { 3488 if (attributesBuilder_ == null) { 3489 attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 3490 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( 3491 attributes_, 3492 ((bitField0_ & 0x00000002) == 0x00000002), 3493 getParentForChildren(), 3494 isClean()); 3495 attributes_ = null; 3496 } 3497 return attributesBuilder_; 3498 } 3499 3500 // repeated .NameStringPair configuration = 3; 3501 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ = 3502 java.util.Collections.emptyList(); ensureConfigurationIsMutable()3503 private void ensureConfigurationIsMutable() { 3504 if (!((bitField0_ & 0x00000004) == 0x00000004)) { 3505 configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_); 3506 bitField0_ |= 0x00000004; 3507 } 3508 } 3509 3510 private com.google.protobuf.RepeatedFieldBuilder< 3511 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_; 3512 3513 /** 3514 * <code>repeated .NameStringPair configuration = 3;</code> 3515 */ getConfigurationList()3516 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { 3517 if (configurationBuilder_ == null) { 3518 return java.util.Collections.unmodifiableList(configuration_); 3519 } else { 3520 return configurationBuilder_.getMessageList(); 3521 } 3522 } 3523 /** 3524 * <code>repeated .NameStringPair configuration = 3;</code> 3525 */ getConfigurationCount()3526 public int getConfigurationCount() { 3527 if (configurationBuilder_ == null) { 3528 return configuration_.size(); 3529 } else { 3530 return configurationBuilder_.getCount(); 3531 } 3532 } 3533 /** 3534 * <code>repeated .NameStringPair configuration = 3;</code> 3535 */ getConfiguration(int index)3536 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { 3537 if (configurationBuilder_ == null) { 3538 return configuration_.get(index); 3539 } else { 3540 return configurationBuilder_.getMessage(index); 3541 } 3542 } 3543 /** 3544 * <code>repeated .NameStringPair configuration = 3;</code> 3545 */ setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value)3546 public Builder setConfiguration( 3547 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { 3548 if (configurationBuilder_ == null) { 3549 if (value == null) { 3550 throw new NullPointerException(); 3551 } 3552 ensureConfigurationIsMutable(); 3553 configuration_.set(index, value); 3554 onChanged(); 3555 } else { 3556 configurationBuilder_.setMessage(index, value); 3557 } 3558 return this; 3559 } 3560 /** 3561 * <code>repeated .NameStringPair configuration = 3;</code> 3562 */ setConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue)3563 public Builder setConfiguration( 3564 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { 3565 if (configurationBuilder_ == null) { 3566 ensureConfigurationIsMutable(); 3567 configuration_.set(index, builderForValue.build()); 3568 onChanged(); 3569 } else { 3570 configurationBuilder_.setMessage(index, builderForValue.build()); 3571 } 3572 return this; 3573 } 3574 /** 3575 * <code>repeated .NameStringPair configuration = 3;</code> 3576 */ addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value)3577 public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { 3578 if (configurationBuilder_ == null) { 3579 if (value == null) { 3580 throw new NullPointerException(); 3581 } 3582 ensureConfigurationIsMutable(); 3583 configuration_.add(value); 3584 onChanged(); 3585 } else { 3586 configurationBuilder_.addMessage(value); 3587 } 3588 return this; 3589 } 3590 /** 3591 * <code>repeated .NameStringPair configuration = 3;</code> 3592 */ addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value)3593 public Builder addConfiguration( 3594 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) { 3595 if (configurationBuilder_ == null) { 3596 if (value == null) { 3597 throw new NullPointerException(); 3598 } 3599 ensureConfigurationIsMutable(); 3600 configuration_.add(index, value); 3601 onChanged(); 3602 } else { 3603 configurationBuilder_.addMessage(index, value); 3604 } 3605 return this; 3606 } 3607 /** 3608 * <code>repeated .NameStringPair configuration = 3;</code> 3609 */ addConfiguration( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue)3610 public Builder addConfiguration( 3611 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { 3612 if (configurationBuilder_ == null) { 3613 ensureConfigurationIsMutable(); 3614 configuration_.add(builderForValue.build()); 3615 onChanged(); 3616 } else { 3617 configurationBuilder_.addMessage(builderForValue.build()); 3618 } 3619 return this; 3620 } 3621 /** 3622 * <code>repeated .NameStringPair configuration = 3;</code> 3623 */ addConfiguration( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue)3624 public Builder addConfiguration( 3625 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) { 3626 if (configurationBuilder_ == null) { 3627 ensureConfigurationIsMutable(); 3628 configuration_.add(index, builderForValue.build()); 3629 onChanged(); 3630 } else { 3631 configurationBuilder_.addMessage(index, builderForValue.build()); 3632 } 3633 return this; 3634 } 3635 /** 3636 * <code>repeated .NameStringPair configuration = 3;</code> 3637 */ addAllConfiguration( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values)3638 public Builder addAllConfiguration( 3639 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) { 3640 if (configurationBuilder_ == null) { 3641 ensureConfigurationIsMutable(); 3642 super.addAll(values, configuration_); 3643 onChanged(); 3644 } else { 3645 configurationBuilder_.addAllMessages(values); 3646 } 3647 return this; 3648 } 3649 /** 3650 * <code>repeated .NameStringPair configuration = 3;</code> 3651 */ clearConfiguration()3652 public Builder clearConfiguration() { 3653 if (configurationBuilder_ == null) { 3654 configuration_ = java.util.Collections.emptyList(); 3655 bitField0_ = (bitField0_ & ~0x00000004); 3656 onChanged(); 3657 } else { 3658 configurationBuilder_.clear(); 3659 } 3660 return this; 3661 } 3662 /** 3663 * <code>repeated .NameStringPair configuration = 3;</code> 3664 */ removeConfiguration(int index)3665 public Builder removeConfiguration(int index) { 3666 if (configurationBuilder_ == null) { 3667 ensureConfigurationIsMutable(); 3668 configuration_.remove(index); 3669 onChanged(); 3670 } else { 3671 configurationBuilder_.remove(index); 3672 } 3673 return this; 3674 } 3675 /** 3676 * <code>repeated .NameStringPair configuration = 3;</code> 3677 */ getConfigurationBuilder( int index)3678 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder( 3679 int index) { 3680 return getConfigurationFieldBuilder().getBuilder(index); 3681 } 3682 /** 3683 * <code>repeated .NameStringPair configuration = 3;</code> 3684 */ getConfigurationOrBuilder( int index)3685 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 3686 int index) { 3687 if (configurationBuilder_ == null) { 3688 return configuration_.get(index); } else { 3689 return configurationBuilder_.getMessageOrBuilder(index); 3690 } 3691 } 3692 /** 3693 * <code>repeated .NameStringPair configuration = 3;</code> 3694 */ 3695 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()3696 getConfigurationOrBuilderList() { 3697 if (configurationBuilder_ != null) { 3698 return configurationBuilder_.getMessageOrBuilderList(); 3699 } else { 3700 return java.util.Collections.unmodifiableList(configuration_); 3701 } 3702 } 3703 /** 3704 * <code>repeated .NameStringPair configuration = 3;</code> 3705 */ addConfigurationBuilder()3706 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() { 3707 return getConfigurationFieldBuilder().addBuilder( 3708 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); 3709 } 3710 /** 3711 * <code>repeated .NameStringPair configuration = 3;</code> 3712 */ addConfigurationBuilder( int index)3713 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder( 3714 int index) { 3715 return getConfigurationFieldBuilder().addBuilder( 3716 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()); 3717 } 3718 /** 3719 * <code>repeated .NameStringPair configuration = 3;</code> 3720 */ 3721 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder> getConfigurationBuilderList()3722 getConfigurationBuilderList() { 3723 return getConfigurationFieldBuilder().getBuilderList(); 3724 } 3725 private com.google.protobuf.RepeatedFieldBuilder< 3726 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationFieldBuilder()3727 getConfigurationFieldBuilder() { 3728 if (configurationBuilder_ == null) { 3729 configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 3730 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>( 3731 configuration_, 3732 ((bitField0_ & 0x00000004) == 0x00000004), 3733 getParentForChildren(), 3734 isClean()); 3735 configuration_ = null; 3736 } 3737 return configurationBuilder_; 3738 } 3739 3740 // @@protoc_insertion_point(builder_scope:ColumnFamilySchema) 3741 } 3742 3743 static { 3744 defaultInstance = new ColumnFamilySchema(true); defaultInstance.initFields()3745 defaultInstance.initFields(); 3746 } 3747 3748 // @@protoc_insertion_point(class_scope:ColumnFamilySchema) 3749 } 3750 3751 public interface RegionInfoOrBuilder 3752 extends com.google.protobuf.MessageOrBuilder { 3753 3754 // required uint64 region_id = 1; 3755 /** 3756 * <code>required uint64 region_id = 1;</code> 3757 */ hasRegionId()3758 boolean hasRegionId(); 3759 /** 3760 * <code>required uint64 region_id = 1;</code> 3761 */ getRegionId()3762 long getRegionId(); 3763 3764 // required .TableName table_name = 2; 3765 /** 3766 * <code>required .TableName table_name = 2;</code> 3767 */ hasTableName()3768 boolean hasTableName(); 3769 /** 3770 * <code>required .TableName table_name = 2;</code> 3771 */ getTableName()3772 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName(); 3773 /** 3774 * <code>required .TableName table_name = 2;</code> 3775 */ getTableNameOrBuilder()3776 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); 3777 3778 // optional bytes start_key = 3; 3779 /** 3780 * <code>optional bytes start_key = 3;</code> 3781 */ hasStartKey()3782 boolean hasStartKey(); 3783 /** 3784 * <code>optional bytes start_key = 3;</code> 3785 */ getStartKey()3786 com.google.protobuf.ByteString getStartKey(); 3787 3788 // optional bytes end_key = 4; 3789 /** 3790 * <code>optional bytes end_key = 4;</code> 3791 */ hasEndKey()3792 boolean hasEndKey(); 3793 /** 3794 * <code>optional bytes end_key = 4;</code> 3795 */ getEndKey()3796 com.google.protobuf.ByteString getEndKey(); 3797 3798 // optional bool offline = 5; 3799 /** 3800 * <code>optional bool offline = 5;</code> 3801 */ hasOffline()3802 boolean hasOffline(); 3803 /** 3804 * <code>optional bool offline = 5;</code> 3805 */ getOffline()3806 boolean getOffline(); 3807 3808 // optional bool split = 6; 3809 /** 3810 * <code>optional bool split = 6;</code> 3811 */ hasSplit()3812 boolean hasSplit(); 3813 /** 3814 * <code>optional bool split = 6;</code> 3815 */ getSplit()3816 boolean getSplit(); 3817 3818 // optional int32 replica_id = 7 [default = 0]; 3819 /** 3820 * <code>optional int32 replica_id = 7 [default = 0];</code> 3821 */ hasReplicaId()3822 boolean hasReplicaId(); 3823 /** 3824 * <code>optional int32 replica_id = 7 [default = 0];</code> 3825 */ getReplicaId()3826 int getReplicaId(); 3827 } 3828 /** 3829 * Protobuf type {@code RegionInfo} 3830 * 3831 * <pre> 3832 ** 3833 * Protocol buffer version of HRegionInfo. 3834 * </pre> 3835 */ 3836 public static final class RegionInfo extends 3837 com.google.protobuf.GeneratedMessage 3838 implements RegionInfoOrBuilder { 3839 // Use RegionInfo.newBuilder() to construct. RegionInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder)3840 private RegionInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 3841 super(builder); 3842 this.unknownFields = builder.getUnknownFields(); 3843 } RegionInfo(boolean noInit)3844 private RegionInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 3845 3846 private static final RegionInfo defaultInstance; getDefaultInstance()3847 public static RegionInfo getDefaultInstance() { 3848 return defaultInstance; 3849 } 3850 getDefaultInstanceForType()3851 public RegionInfo getDefaultInstanceForType() { 3852 return defaultInstance; 3853 } 3854 3855 private final com.google.protobuf.UnknownFieldSet unknownFields; 3856 @java.lang.Override 3857 public final com.google.protobuf.UnknownFieldSet getUnknownFields()3858 getUnknownFields() { 3859 return this.unknownFields; 3860 } RegionInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3861 private RegionInfo( 3862 com.google.protobuf.CodedInputStream input, 3863 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3864 throws com.google.protobuf.InvalidProtocolBufferException { 3865 initFields(); 3866 int mutable_bitField0_ = 0; 3867 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 3868 com.google.protobuf.UnknownFieldSet.newBuilder(); 3869 try { 3870 boolean done = false; 3871 while (!done) { 3872 int tag = input.readTag(); 3873 switch (tag) { 3874 case 0: 3875 done = true; 3876 break; 3877 default: { 3878 if (!parseUnknownField(input, unknownFields, 3879 extensionRegistry, tag)) { 3880 done = true; 3881 } 3882 break; 3883 } 3884 case 8: { 3885 bitField0_ |= 0x00000001; 3886 regionId_ = input.readUInt64(); 3887 break; 3888 } 3889 case 18: { 3890 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; 3891 if (((bitField0_ & 0x00000002) == 0x00000002)) { 3892 subBuilder = tableName_.toBuilder(); 3893 } 3894 tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); 3895 if (subBuilder != null) { 3896 subBuilder.mergeFrom(tableName_); 3897 tableName_ = subBuilder.buildPartial(); 3898 } 3899 bitField0_ |= 0x00000002; 3900 break; 3901 } 3902 case 26: { 3903 bitField0_ |= 0x00000004; 3904 startKey_ = input.readBytes(); 3905 break; 3906 } 3907 case 34: { 3908 bitField0_ |= 0x00000008; 3909 endKey_ = input.readBytes(); 3910 break; 3911 } 3912 case 40: { 3913 bitField0_ |= 0x00000010; 3914 offline_ = input.readBool(); 3915 break; 3916 } 3917 case 48: { 3918 bitField0_ |= 0x00000020; 3919 split_ = input.readBool(); 3920 break; 3921 } 3922 case 56: { 3923 bitField0_ |= 0x00000040; 3924 replicaId_ = input.readInt32(); 3925 break; 3926 } 3927 } 3928 } 3929 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3930 throw e.setUnfinishedMessage(this); 3931 } catch (java.io.IOException e) { 3932 throw new com.google.protobuf.InvalidProtocolBufferException( 3933 e.getMessage()).setUnfinishedMessage(this); 3934 } finally { 3935 this.unknownFields = unknownFields.build(); 3936 makeExtensionsImmutable(); 3937 } 3938 } 3939 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3940 getDescriptor() { 3941 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; 3942 } 3943 3944 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3945 internalGetFieldAccessorTable() { 3946 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable 3947 .ensureFieldAccessorsInitialized( 3948 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); 3949 } 3950 3951 public static com.google.protobuf.Parser<RegionInfo> PARSER = 3952 new com.google.protobuf.AbstractParser<RegionInfo>() { 3953 public RegionInfo parsePartialFrom( 3954 com.google.protobuf.CodedInputStream input, 3955 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3956 throws com.google.protobuf.InvalidProtocolBufferException { 3957 return new RegionInfo(input, extensionRegistry); 3958 } 3959 }; 3960 3961 @java.lang.Override getParserForType()3962 public com.google.protobuf.Parser<RegionInfo> getParserForType() { 3963 return PARSER; 3964 } 3965 3966 private int bitField0_; 3967 // required uint64 region_id = 1; 3968 public static final int REGION_ID_FIELD_NUMBER = 1; 3969 private long regionId_; 3970 /** 3971 * <code>required uint64 region_id = 1;</code> 3972 */ hasRegionId()3973 public boolean hasRegionId() { 3974 return ((bitField0_ & 0x00000001) == 0x00000001); 3975 } 3976 /** 3977 * <code>required uint64 region_id = 1;</code> 3978 */ getRegionId()3979 public long getRegionId() { 3980 return regionId_; 3981 } 3982 3983 // required .TableName table_name = 2; 3984 public static final int TABLE_NAME_FIELD_NUMBER = 2; 3985 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_; 3986 /** 3987 * <code>required .TableName table_name = 2;</code> 3988 */ hasTableName()3989 public boolean hasTableName() { 3990 return ((bitField0_ & 0x00000002) == 0x00000002); 3991 } 3992 /** 3993 * <code>required .TableName table_name = 2;</code> 3994 */ getTableName()3995 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { 3996 return tableName_; 3997 } 3998 /** 3999 * <code>required .TableName table_name = 2;</code> 4000 */ getTableNameOrBuilder()4001 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { 4002 return tableName_; 4003 } 4004 4005 // optional bytes start_key = 3; 4006 public static final int START_KEY_FIELD_NUMBER = 3; 4007 private com.google.protobuf.ByteString startKey_; 4008 /** 4009 * <code>optional bytes start_key = 3;</code> 4010 */ hasStartKey()4011 public boolean hasStartKey() { 4012 return ((bitField0_ & 0x00000004) == 0x00000004); 4013 } 4014 /** 4015 * <code>optional bytes start_key = 3;</code> 4016 */ getStartKey()4017 public com.google.protobuf.ByteString getStartKey() { 4018 return startKey_; 4019 } 4020 4021 // optional bytes end_key = 4; 4022 public static final int END_KEY_FIELD_NUMBER = 4; 4023 private com.google.protobuf.ByteString endKey_; 4024 /** 4025 * <code>optional bytes end_key = 4;</code> 4026 */ hasEndKey()4027 public boolean hasEndKey() { 4028 return ((bitField0_ & 0x00000008) == 0x00000008); 4029 } 4030 /** 4031 * <code>optional bytes end_key = 4;</code> 4032 */ getEndKey()4033 public com.google.protobuf.ByteString getEndKey() { 4034 return endKey_; 4035 } 4036 4037 // optional bool offline = 5; 4038 public static final int OFFLINE_FIELD_NUMBER = 5; 4039 private boolean offline_; 4040 /** 4041 * <code>optional bool offline = 5;</code> 4042 */ hasOffline()4043 public boolean hasOffline() { 4044 return ((bitField0_ & 0x00000010) == 0x00000010); 4045 } 4046 /** 4047 * <code>optional bool offline = 5;</code> 4048 */ getOffline()4049 public boolean getOffline() { 4050 return offline_; 4051 } 4052 4053 // optional bool split = 6; 4054 public static final int SPLIT_FIELD_NUMBER = 6; 4055 private boolean split_; 4056 /** 4057 * <code>optional bool split = 6;</code> 4058 */ hasSplit()4059 public boolean hasSplit() { 4060 return ((bitField0_ & 0x00000020) == 0x00000020); 4061 } 4062 /** 4063 * <code>optional bool split = 6;</code> 4064 */ getSplit()4065 public boolean getSplit() { 4066 return split_; 4067 } 4068 4069 // optional int32 replica_id = 7 [default = 0]; 4070 public static final int REPLICA_ID_FIELD_NUMBER = 7; 4071 private int replicaId_; 4072 /** 4073 * <code>optional int32 replica_id = 7 [default = 0];</code> 4074 */ hasReplicaId()4075 public boolean hasReplicaId() { 4076 return ((bitField0_ & 0x00000040) == 0x00000040); 4077 } 4078 /** 4079 * <code>optional int32 replica_id = 7 [default = 0];</code> 4080 */ getReplicaId()4081 public int getReplicaId() { 4082 return replicaId_; 4083 } 4084 initFields()4085 private void initFields() { 4086 regionId_ = 0L; 4087 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 4088 startKey_ = com.google.protobuf.ByteString.EMPTY; 4089 endKey_ = com.google.protobuf.ByteString.EMPTY; 4090 offline_ = false; 4091 split_ = false; 4092 replicaId_ = 0; 4093 } 4094 private byte memoizedIsInitialized = -1; isInitialized()4095 public final boolean isInitialized() { 4096 byte isInitialized = memoizedIsInitialized; 4097 if (isInitialized != -1) return isInitialized == 1; 4098 4099 if (!hasRegionId()) { 4100 memoizedIsInitialized = 0; 4101 return false; 4102 } 4103 if (!hasTableName()) { 4104 memoizedIsInitialized = 0; 4105 return false; 4106 } 4107 if (!getTableName().isInitialized()) { 4108 memoizedIsInitialized = 0; 4109 return false; 4110 } 4111 memoizedIsInitialized = 1; 4112 return true; 4113 } 4114 writeTo(com.google.protobuf.CodedOutputStream output)4115 public void writeTo(com.google.protobuf.CodedOutputStream output) 4116 throws java.io.IOException { 4117 getSerializedSize(); 4118 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4119 output.writeUInt64(1, regionId_); 4120 } 4121 if (((bitField0_ & 0x00000002) == 0x00000002)) { 4122 output.writeMessage(2, tableName_); 4123 } 4124 if (((bitField0_ & 0x00000004) == 0x00000004)) { 4125 output.writeBytes(3, startKey_); 4126 } 4127 if (((bitField0_ & 0x00000008) == 0x00000008)) { 4128 output.writeBytes(4, endKey_); 4129 } 4130 if (((bitField0_ & 0x00000010) == 0x00000010)) { 4131 output.writeBool(5, offline_); 4132 } 4133 if (((bitField0_ & 0x00000020) == 0x00000020)) { 4134 output.writeBool(6, split_); 4135 } 4136 if (((bitField0_ & 0x00000040) == 0x00000040)) { 4137 output.writeInt32(7, replicaId_); 4138 } 4139 getUnknownFields().writeTo(output); 4140 } 4141 4142 private int memoizedSerializedSize = -1; getSerializedSize()4143 public int getSerializedSize() { 4144 int size = memoizedSerializedSize; 4145 if (size != -1) return size; 4146 4147 size = 0; 4148 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4149 size += com.google.protobuf.CodedOutputStream 4150 .computeUInt64Size(1, regionId_); 4151 } 4152 if (((bitField0_ & 0x00000002) == 0x00000002)) { 4153 size += com.google.protobuf.CodedOutputStream 4154 .computeMessageSize(2, tableName_); 4155 } 4156 if (((bitField0_ & 0x00000004) == 0x00000004)) { 4157 size += com.google.protobuf.CodedOutputStream 4158 .computeBytesSize(3, startKey_); 4159 } 4160 if (((bitField0_ & 0x00000008) == 0x00000008)) { 4161 size += com.google.protobuf.CodedOutputStream 4162 .computeBytesSize(4, endKey_); 4163 } 4164 if (((bitField0_ & 0x00000010) == 0x00000010)) { 4165 size += com.google.protobuf.CodedOutputStream 4166 .computeBoolSize(5, offline_); 4167 } 4168 if (((bitField0_ & 0x00000020) == 0x00000020)) { 4169 size += com.google.protobuf.CodedOutputStream 4170 .computeBoolSize(6, split_); 4171 } 4172 if (((bitField0_ & 0x00000040) == 0x00000040)) { 4173 size += com.google.protobuf.CodedOutputStream 4174 .computeInt32Size(7, replicaId_); 4175 } 4176 size += getUnknownFields().getSerializedSize(); 4177 memoizedSerializedSize = size; 4178 return size; 4179 } 4180 4181 private static final long serialVersionUID = 0L; 4182 @java.lang.Override writeReplace()4183 protected java.lang.Object writeReplace() 4184 throws java.io.ObjectStreamException { 4185 return super.writeReplace(); 4186 } 4187 4188 @java.lang.Override equals(final java.lang.Object obj)4189 public boolean equals(final java.lang.Object obj) { 4190 if (obj == this) { 4191 return true; 4192 } 4193 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)) { 4194 return super.equals(obj); 4195 } 4196 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) obj; 4197 4198 boolean result = true; 4199 result = result && (hasRegionId() == other.hasRegionId()); 4200 if (hasRegionId()) { 4201 result = result && (getRegionId() 4202 == other.getRegionId()); 4203 } 4204 result = result && (hasTableName() == other.hasTableName()); 4205 if (hasTableName()) { 4206 result = result && getTableName() 4207 .equals(other.getTableName()); 4208 } 4209 result = result && (hasStartKey() == other.hasStartKey()); 4210 if (hasStartKey()) { 4211 result = result && getStartKey() 4212 .equals(other.getStartKey()); 4213 } 4214 result = result && (hasEndKey() == other.hasEndKey()); 4215 if (hasEndKey()) { 4216 result = result && getEndKey() 4217 .equals(other.getEndKey()); 4218 } 4219 result = result && (hasOffline() == other.hasOffline()); 4220 if (hasOffline()) { 4221 result = result && (getOffline() 4222 == other.getOffline()); 4223 } 4224 result = result && (hasSplit() == other.hasSplit()); 4225 if (hasSplit()) { 4226 result = result && (getSplit() 4227 == other.getSplit()); 4228 } 4229 result = result && (hasReplicaId() == other.hasReplicaId()); 4230 if (hasReplicaId()) { 4231 result = result && (getReplicaId() 4232 == other.getReplicaId()); 4233 } 4234 result = result && 4235 getUnknownFields().equals(other.getUnknownFields()); 4236 return result; 4237 } 4238 4239 private int memoizedHashCode = 0; 4240 @java.lang.Override hashCode()4241 public int hashCode() { 4242 if (memoizedHashCode != 0) { 4243 return memoizedHashCode; 4244 } 4245 int hash = 41; 4246 hash = (19 * hash) + getDescriptorForType().hashCode(); 4247 if (hasRegionId()) { 4248 hash = (37 * hash) + REGION_ID_FIELD_NUMBER; 4249 hash = (53 * hash) + hashLong(getRegionId()); 4250 } 4251 if (hasTableName()) { 4252 hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; 4253 hash = (53 * hash) + getTableName().hashCode(); 4254 } 4255 if (hasStartKey()) { 4256 hash = (37 * hash) + START_KEY_FIELD_NUMBER; 4257 hash = (53 * hash) + getStartKey().hashCode(); 4258 } 4259 if (hasEndKey()) { 4260 hash = (37 * hash) + END_KEY_FIELD_NUMBER; 4261 hash = (53 * hash) + getEndKey().hashCode(); 4262 } 4263 if (hasOffline()) { 4264 hash = (37 * hash) + OFFLINE_FIELD_NUMBER; 4265 hash = (53 * hash) + hashBoolean(getOffline()); 4266 } 4267 if (hasSplit()) { 4268 hash = (37 * hash) + SPLIT_FIELD_NUMBER; 4269 hash = (53 * hash) + hashBoolean(getSplit()); 4270 } 4271 if (hasReplicaId()) { 4272 hash = (37 * hash) + REPLICA_ID_FIELD_NUMBER; 4273 hash = (53 * hash) + getReplicaId(); 4274 } 4275 hash = (29 * hash) + getUnknownFields().hashCode(); 4276 memoizedHashCode = hash; 4277 return hash; 4278 } 4279 parseFrom( com.google.protobuf.ByteString data)4280 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( 4281 com.google.protobuf.ByteString data) 4282 throws com.google.protobuf.InvalidProtocolBufferException { 4283 return PARSER.parseFrom(data); 4284 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4285 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( 4286 com.google.protobuf.ByteString data, 4287 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4288 throws com.google.protobuf.InvalidProtocolBufferException { 4289 return PARSER.parseFrom(data, extensionRegistry); 4290 } parseFrom(byte[] data)4291 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) 4292 throws com.google.protobuf.InvalidProtocolBufferException { 4293 return PARSER.parseFrom(data); 4294 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4295 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( 4296 byte[] data, 4297 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4298 throws com.google.protobuf.InvalidProtocolBufferException { 4299 return PARSER.parseFrom(data, extensionRegistry); 4300 } parseFrom(java.io.InputStream input)4301 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) 4302 throws java.io.IOException { 4303 return PARSER.parseFrom(input); 4304 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4305 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( 4306 java.io.InputStream input, 4307 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4308 throws java.io.IOException { 4309 return PARSER.parseFrom(input, extensionRegistry); 4310 } parseDelimitedFrom(java.io.InputStream input)4311 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) 4312 throws java.io.IOException { 4313 return PARSER.parseDelimitedFrom(input); 4314 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4315 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( 4316 java.io.InputStream input, 4317 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4318 throws java.io.IOException { 4319 return PARSER.parseDelimitedFrom(input, extensionRegistry); 4320 } parseFrom( com.google.protobuf.CodedInputStream input)4321 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( 4322 com.google.protobuf.CodedInputStream input) 4323 throws java.io.IOException { 4324 return PARSER.parseFrom(input); 4325 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4326 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( 4327 com.google.protobuf.CodedInputStream input, 4328 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4329 throws java.io.IOException { 4330 return PARSER.parseFrom(input, extensionRegistry); 4331 } 4332 newBuilder()4333 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()4334 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype)4335 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype) { 4336 return newBuilder().mergeFrom(prototype); 4337 } toBuilder()4338 public Builder toBuilder() { return newBuilder(this); } 4339 4340 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)4341 protected Builder newBuilderForType( 4342 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4343 Builder builder = new Builder(parent); 4344 return builder; 4345 } 4346 /** 4347 * Protobuf type {@code RegionInfo} 4348 * 4349 * <pre> 4350 ** 4351 * Protocol buffer version of HRegionInfo. 4352 * </pre> 4353 */ 4354 public static final class Builder extends 4355 com.google.protobuf.GeneratedMessage.Builder<Builder> 4356 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { 4357 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()4358 getDescriptor() { 4359 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; 4360 } 4361 4362 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()4363 internalGetFieldAccessorTable() { 4364 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable 4365 .ensureFieldAccessorsInitialized( 4366 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); 4367 } 4368 4369 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() Builder()4370 private Builder() { 4371 maybeForceBuilderInitialization(); 4372 } 4373 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)4374 private Builder( 4375 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4376 super(parent); 4377 maybeForceBuilderInitialization(); 4378 } maybeForceBuilderInitialization()4379 private void maybeForceBuilderInitialization() { 4380 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 4381 getTableNameFieldBuilder(); 4382 } 4383 } create()4384 private static Builder create() { 4385 return new Builder(); 4386 } 4387 clear()4388 public Builder clear() { 4389 super.clear(); 4390 regionId_ = 0L; 4391 bitField0_ = (bitField0_ & ~0x00000001); 4392 if (tableNameBuilder_ == null) { 4393 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 4394 } else { 4395 tableNameBuilder_.clear(); 4396 } 4397 bitField0_ = (bitField0_ & ~0x00000002); 4398 startKey_ = com.google.protobuf.ByteString.EMPTY; 4399 bitField0_ = (bitField0_ & ~0x00000004); 4400 endKey_ = com.google.protobuf.ByteString.EMPTY; 4401 bitField0_ = (bitField0_ & ~0x00000008); 4402 offline_ = false; 4403 bitField0_ = (bitField0_ & ~0x00000010); 4404 split_ = false; 4405 bitField0_ = (bitField0_ & ~0x00000020); 4406 replicaId_ = 0; 4407 bitField0_ = (bitField0_ & ~0x00000040); 4408 return this; 4409 } 4410 clone()4411 public Builder clone() { 4412 return create().mergeFrom(buildPartial()); 4413 } 4414 4415 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()4416 getDescriptorForType() { 4417 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; 4418 } 4419 getDefaultInstanceForType()4420 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { 4421 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 4422 } 4423 build()4424 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo build() { 4425 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); 4426 if (!result.isInitialized()) { 4427 throw newUninitializedMessageException(result); 4428 } 4429 return result; 4430 } 4431 buildPartial()4432 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { 4433 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo(this); 4434 int from_bitField0_ = bitField0_; 4435 int to_bitField0_ = 0; 4436 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 4437 to_bitField0_ |= 0x00000001; 4438 } 4439 result.regionId_ = regionId_; 4440 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 4441 to_bitField0_ |= 0x00000002; 4442 } 4443 if (tableNameBuilder_ == null) { 4444 result.tableName_ = tableName_; 4445 } else { 4446 result.tableName_ = tableNameBuilder_.build(); 4447 } 4448 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 4449 to_bitField0_ |= 0x00000004; 4450 } 4451 result.startKey_ = startKey_; 4452 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 4453 to_bitField0_ |= 0x00000008; 4454 } 4455 result.endKey_ = endKey_; 4456 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 4457 to_bitField0_ |= 0x00000010; 4458 } 4459 result.offline_ = offline_; 4460 if (((from_bitField0_ & 0x00000020) == 0x00000020)) { 4461 to_bitField0_ |= 0x00000020; 4462 } 4463 result.split_ = split_; 4464 if (((from_bitField0_ & 0x00000040) == 0x00000040)) { 4465 to_bitField0_ |= 0x00000040; 4466 } 4467 result.replicaId_ = replicaId_; 4468 result.bitField0_ = to_bitField0_; 4469 onBuilt(); 4470 return result; 4471 } 4472 mergeFrom(com.google.protobuf.Message other)4473 public Builder mergeFrom(com.google.protobuf.Message other) { 4474 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) { 4475 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)other); 4476 } else { 4477 super.mergeFrom(other); 4478 return this; 4479 } 4480 } 4481 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other)4482 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other) { 4483 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; 4484 if (other.hasRegionId()) { 4485 setRegionId(other.getRegionId()); 4486 } 4487 if (other.hasTableName()) { 4488 mergeTableName(other.getTableName()); 4489 } 4490 if (other.hasStartKey()) { 4491 setStartKey(other.getStartKey()); 4492 } 4493 if (other.hasEndKey()) { 4494 setEndKey(other.getEndKey()); 4495 } 4496 if (other.hasOffline()) { 4497 setOffline(other.getOffline()); 4498 } 4499 if (other.hasSplit()) { 4500 setSplit(other.getSplit()); 4501 } 4502 if (other.hasReplicaId()) { 4503 setReplicaId(other.getReplicaId()); 4504 } 4505 this.mergeUnknownFields(other.getUnknownFields()); 4506 return this; 4507 } 4508 isInitialized()4509 public final boolean isInitialized() { 4510 if (!hasRegionId()) { 4511 4512 return false; 4513 } 4514 if (!hasTableName()) { 4515 4516 return false; 4517 } 4518 if (!getTableName().isInitialized()) { 4519 4520 return false; 4521 } 4522 return true; 4523 } 4524 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4525 public Builder mergeFrom( 4526 com.google.protobuf.CodedInputStream input, 4527 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4528 throws java.io.IOException { 4529 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parsedMessage = null; 4530 try { 4531 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 4532 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4533 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) e.getUnfinishedMessage(); 4534 throw e; 4535 } finally { 4536 if (parsedMessage != null) { 4537 mergeFrom(parsedMessage); 4538 } 4539 } 4540 return this; 4541 } 4542 private int bitField0_; 4543 4544 // required uint64 region_id = 1; 4545 private long regionId_ ; 4546 /** 4547 * <code>required uint64 region_id = 1;</code> 4548 */ hasRegionId()4549 public boolean hasRegionId() { 4550 return ((bitField0_ & 0x00000001) == 0x00000001); 4551 } 4552 /** 4553 * <code>required uint64 region_id = 1;</code> 4554 */ getRegionId()4555 public long getRegionId() { 4556 return regionId_; 4557 } 4558 /** 4559 * <code>required uint64 region_id = 1;</code> 4560 */ setRegionId(long value)4561 public Builder setRegionId(long value) { 4562 bitField0_ |= 0x00000001; 4563 regionId_ = value; 4564 onChanged(); 4565 return this; 4566 } 4567 /** 4568 * <code>required uint64 region_id = 1;</code> 4569 */ clearRegionId()4570 public Builder clearRegionId() { 4571 bitField0_ = (bitField0_ & ~0x00000001); 4572 regionId_ = 0L; 4573 onChanged(); 4574 return this; 4575 } 4576 4577 // required .TableName table_name = 2; 4578 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 4579 private com.google.protobuf.SingleFieldBuilder< 4580 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; 4581 /** 4582 * <code>required .TableName table_name = 2;</code> 4583 */ hasTableName()4584 public boolean hasTableName() { 4585 return ((bitField0_ & 0x00000002) == 0x00000002); 4586 } 4587 /** 4588 * <code>required .TableName table_name = 2;</code> 4589 */ getTableName()4590 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() { 4591 if (tableNameBuilder_ == null) { 4592 return tableName_; 4593 } else { 4594 return tableNameBuilder_.getMessage(); 4595 } 4596 } 4597 /** 4598 * <code>required .TableName table_name = 2;</code> 4599 */ setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)4600 public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { 4601 if (tableNameBuilder_ == null) { 4602 if (value == null) { 4603 throw new NullPointerException(); 4604 } 4605 tableName_ = value; 4606 onChanged(); 4607 } else { 4608 tableNameBuilder_.setMessage(value); 4609 } 4610 bitField0_ |= 0x00000002; 4611 return this; 4612 } 4613 /** 4614 * <code>required .TableName table_name = 2;</code> 4615 */ setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)4616 public Builder setTableName( 4617 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { 4618 if (tableNameBuilder_ == null) { 4619 tableName_ = builderForValue.build(); 4620 onChanged(); 4621 } else { 4622 tableNameBuilder_.setMessage(builderForValue.build()); 4623 } 4624 bitField0_ |= 0x00000002; 4625 return this; 4626 } 4627 /** 4628 * <code>required .TableName table_name = 2;</code> 4629 */ mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)4630 public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) { 4631 if (tableNameBuilder_ == null) { 4632 if (((bitField0_ & 0x00000002) == 0x00000002) && 4633 tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { 4634 tableName_ = 4635 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); 4636 } else { 4637 tableName_ = value; 4638 } 4639 onChanged(); 4640 } else { 4641 tableNameBuilder_.mergeFrom(value); 4642 } 4643 bitField0_ |= 0x00000002; 4644 return this; 4645 } 4646 /** 4647 * <code>required .TableName table_name = 2;</code> 4648 */ clearTableName()4649 public Builder clearTableName() { 4650 if (tableNameBuilder_ == null) { 4651 tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance(); 4652 onChanged(); 4653 } else { 4654 tableNameBuilder_.clear(); 4655 } 4656 bitField0_ = (bitField0_ & ~0x00000002); 4657 return this; 4658 } 4659 /** 4660 * <code>required .TableName table_name = 2;</code> 4661 */ getTableNameBuilder()4662 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { 4663 bitField0_ |= 0x00000002; 4664 onChanged(); 4665 return getTableNameFieldBuilder().getBuilder(); 4666 } 4667 /** 4668 * <code>required .TableName table_name = 2;</code> 4669 */ getTableNameOrBuilder()4670 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { 4671 if (tableNameBuilder_ != null) { 4672 return tableNameBuilder_.getMessageOrBuilder(); 4673 } else { 4674 return tableName_; 4675 } 4676 } 4677 /** 4678 * <code>required .TableName table_name = 2;</code> 4679 */ 4680 private com.google.protobuf.SingleFieldBuilder< 4681 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> getTableNameFieldBuilder()4682 getTableNameFieldBuilder() { 4683 if (tableNameBuilder_ == null) { 4684 tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< 4685 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>( 4686 tableName_, 4687 getParentForChildren(), 4688 isClean()); 4689 tableName_ = null; 4690 } 4691 return tableNameBuilder_; 4692 } 4693 4694 // optional bytes start_key = 3; 4695 private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY; 4696 /** 4697 * <code>optional bytes start_key = 3;</code> 4698 */ hasStartKey()4699 public boolean hasStartKey() { 4700 return ((bitField0_ & 0x00000004) == 0x00000004); 4701 } 4702 /** 4703 * <code>optional bytes start_key = 3;</code> 4704 */ getStartKey()4705 public com.google.protobuf.ByteString getStartKey() { 4706 return startKey_; 4707 } 4708 /** 4709 * <code>optional bytes start_key = 3;</code> 4710 */ setStartKey(com.google.protobuf.ByteString value)4711 public Builder setStartKey(com.google.protobuf.ByteString value) { 4712 if (value == null) { 4713 throw new NullPointerException(); 4714 } 4715 bitField0_ |= 0x00000004; 4716 startKey_ = value; 4717 onChanged(); 4718 return this; 4719 } 4720 /** 4721 * <code>optional bytes start_key = 3;</code> 4722 */ clearStartKey()4723 public Builder clearStartKey() { 4724 bitField0_ = (bitField0_ & ~0x00000004); 4725 startKey_ = getDefaultInstance().getStartKey(); 4726 onChanged(); 4727 return this; 4728 } 4729 4730 // optional bytes end_key = 4; 4731 private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; 4732 /** 4733 * <code>optional bytes end_key = 4;</code> 4734 */ hasEndKey()4735 public boolean hasEndKey() { 4736 return ((bitField0_ & 0x00000008) == 0x00000008); 4737 } 4738 /** 4739 * <code>optional bytes end_key = 4;</code> 4740 */ getEndKey()4741 public com.google.protobuf.ByteString getEndKey() { 4742 return endKey_; 4743 } 4744 /** 4745 * <code>optional bytes end_key = 4;</code> 4746 */ setEndKey(com.google.protobuf.ByteString value)4747 public Builder setEndKey(com.google.protobuf.ByteString value) { 4748 if (value == null) { 4749 throw new NullPointerException(); 4750 } 4751 bitField0_ |= 0x00000008; 4752 endKey_ = value; 4753 onChanged(); 4754 return this; 4755 } 4756 /** 4757 * <code>optional bytes end_key = 4;</code> 4758 */ clearEndKey()4759 public Builder clearEndKey() { 4760 bitField0_ = (bitField0_ & ~0x00000008); 4761 endKey_ = getDefaultInstance().getEndKey(); 4762 onChanged(); 4763 return this; 4764 } 4765 4766 // optional bool offline = 5; 4767 private boolean offline_ ; 4768 /** 4769 * <code>optional bool offline = 5;</code> 4770 */ hasOffline()4771 public boolean hasOffline() { 4772 return ((bitField0_ & 0x00000010) == 0x00000010); 4773 } 4774 /** 4775 * <code>optional bool offline = 5;</code> 4776 */ getOffline()4777 public boolean getOffline() { 4778 return offline_; 4779 } 4780 /** 4781 * <code>optional bool offline = 5;</code> 4782 */ setOffline(boolean value)4783 public Builder setOffline(boolean value) { 4784 bitField0_ |= 0x00000010; 4785 offline_ = value; 4786 onChanged(); 4787 return this; 4788 } 4789 /** 4790 * <code>optional bool offline = 5;</code> 4791 */ clearOffline()4792 public Builder clearOffline() { 4793 bitField0_ = (bitField0_ & ~0x00000010); 4794 offline_ = false; 4795 onChanged(); 4796 return this; 4797 } 4798 4799 // optional bool split = 6; 4800 private boolean split_ ; 4801 /** 4802 * <code>optional bool split = 6;</code> 4803 */ hasSplit()4804 public boolean hasSplit() { 4805 return ((bitField0_ & 0x00000020) == 0x00000020); 4806 } 4807 /** 4808 * <code>optional bool split = 6;</code> 4809 */ getSplit()4810 public boolean getSplit() { 4811 return split_; 4812 } 4813 /** 4814 * <code>optional bool split = 6;</code> 4815 */ setSplit(boolean value)4816 public Builder setSplit(boolean value) { 4817 bitField0_ |= 0x00000020; 4818 split_ = value; 4819 onChanged(); 4820 return this; 4821 } 4822 /** 4823 * <code>optional bool split = 6;</code> 4824 */ clearSplit()4825 public Builder clearSplit() { 4826 bitField0_ = (bitField0_ & ~0x00000020); 4827 split_ = false; 4828 onChanged(); 4829 return this; 4830 } 4831 4832 // optional int32 replica_id = 7 [default = 0]; 4833 private int replicaId_ ; 4834 /** 4835 * <code>optional int32 replica_id = 7 [default = 0];</code> 4836 */ hasReplicaId()4837 public boolean hasReplicaId() { 4838 return ((bitField0_ & 0x00000040) == 0x00000040); 4839 } 4840 /** 4841 * <code>optional int32 replica_id = 7 [default = 0];</code> 4842 */ getReplicaId()4843 public int getReplicaId() { 4844 return replicaId_; 4845 } 4846 /** 4847 * <code>optional int32 replica_id = 7 [default = 0];</code> 4848 */ setReplicaId(int value)4849 public Builder setReplicaId(int value) { 4850 bitField0_ |= 0x00000040; 4851 replicaId_ = value; 4852 onChanged(); 4853 return this; 4854 } 4855 /** 4856 * <code>optional int32 replica_id = 7 [default = 0];</code> 4857 */ clearReplicaId()4858 public Builder clearReplicaId() { 4859 bitField0_ = (bitField0_ & ~0x00000040); 4860 replicaId_ = 0; 4861 onChanged(); 4862 return this; 4863 } 4864 4865 // @@protoc_insertion_point(builder_scope:RegionInfo) 4866 } 4867 4868 static { 4869 defaultInstance = new RegionInfo(true); defaultInstance.initFields()4870 defaultInstance.initFields(); 4871 } 4872 4873 // @@protoc_insertion_point(class_scope:RegionInfo) 4874 } 4875 4876 public interface FavoredNodesOrBuilder 4877 extends com.google.protobuf.MessageOrBuilder { 4878 4879 // repeated .ServerName favored_node = 1; 4880 /** 4881 * <code>repeated .ServerName favored_node = 1;</code> 4882 */ 4883 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList()4884 getFavoredNodeList(); 4885 /** 4886 * <code>repeated .ServerName favored_node = 1;</code> 4887 */ getFavoredNode(int index)4888 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index); 4889 /** 4890 * <code>repeated .ServerName favored_node = 1;</code> 4891 */ getFavoredNodeCount()4892 int getFavoredNodeCount(); 4893 /** 4894 * <code>repeated .ServerName favored_node = 1;</code> 4895 */ 4896 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList()4897 getFavoredNodeOrBuilderList(); 4898 /** 4899 * <code>repeated .ServerName favored_node = 1;</code> 4900 */ getFavoredNodeOrBuilder( int index)4901 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( 4902 int index); 4903 } 4904 /** 4905 * Protobuf type {@code FavoredNodes} 4906 * 4907 * <pre> 4908 ** 4909 * Protocol buffer for favored nodes 4910 * </pre> 4911 */ 4912 public static final class FavoredNodes extends 4913 com.google.protobuf.GeneratedMessage 4914 implements FavoredNodesOrBuilder { 4915 // Use FavoredNodes.newBuilder() to construct. FavoredNodes(com.google.protobuf.GeneratedMessage.Builder<?> builder)4916 private FavoredNodes(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 4917 super(builder); 4918 this.unknownFields = builder.getUnknownFields(); 4919 } FavoredNodes(boolean noInit)4920 private FavoredNodes(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 4921 4922 private static final FavoredNodes defaultInstance; getDefaultInstance()4923 public static FavoredNodes getDefaultInstance() { 4924 return defaultInstance; 4925 } 4926 getDefaultInstanceForType()4927 public FavoredNodes getDefaultInstanceForType() { 4928 return defaultInstance; 4929 } 4930 4931 private final com.google.protobuf.UnknownFieldSet unknownFields; 4932 @java.lang.Override 4933 public final com.google.protobuf.UnknownFieldSet getUnknownFields()4934 getUnknownFields() { 4935 return this.unknownFields; 4936 } FavoredNodes( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4937 private FavoredNodes( 4938 com.google.protobuf.CodedInputStream input, 4939 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4940 throws com.google.protobuf.InvalidProtocolBufferException { 4941 initFields(); 4942 int mutable_bitField0_ = 0; 4943 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 4944 com.google.protobuf.UnknownFieldSet.newBuilder(); 4945 try { 4946 boolean done = false; 4947 while (!done) { 4948 int tag = input.readTag(); 4949 switch (tag) { 4950 case 0: 4951 done = true; 4952 break; 4953 default: { 4954 if (!parseUnknownField(input, unknownFields, 4955 extensionRegistry, tag)) { 4956 done = true; 4957 } 4958 break; 4959 } 4960 case 10: { 4961 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 4962 favoredNode_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(); 4963 mutable_bitField0_ |= 0x00000001; 4964 } 4965 favoredNode_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); 4966 break; 4967 } 4968 } 4969 } 4970 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4971 throw e.setUnfinishedMessage(this); 4972 } catch (java.io.IOException e) { 4973 throw new com.google.protobuf.InvalidProtocolBufferException( 4974 e.getMessage()).setUnfinishedMessage(this); 4975 } finally { 4976 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 4977 favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); 4978 } 4979 this.unknownFields = unknownFields.build(); 4980 makeExtensionsImmutable(); 4981 } 4982 } 4983 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()4984 getDescriptor() { 4985 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_FavoredNodes_descriptor; 4986 } 4987 4988 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()4989 internalGetFieldAccessorTable() { 4990 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_FavoredNodes_fieldAccessorTable 4991 .ensureFieldAccessorsInitialized( 4992 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); 4993 } 4994 4995 public static com.google.protobuf.Parser<FavoredNodes> PARSER = 4996 new com.google.protobuf.AbstractParser<FavoredNodes>() { 4997 public FavoredNodes parsePartialFrom( 4998 com.google.protobuf.CodedInputStream input, 4999 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5000 throws com.google.protobuf.InvalidProtocolBufferException { 5001 return new FavoredNodes(input, extensionRegistry); 5002 } 5003 }; 5004 5005 @java.lang.Override getParserForType()5006 public com.google.protobuf.Parser<FavoredNodes> getParserForType() { 5007 return PARSER; 5008 } 5009 5010 // repeated .ServerName favored_node = 1; 5011 public static final int FAVORED_NODE_FIELD_NUMBER = 1; 5012 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNode_; 5013 /** 5014 * <code>repeated .ServerName favored_node = 1;</code> 5015 */ getFavoredNodeList()5016 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList() { 5017 return favoredNode_; 5018 } 5019 /** 5020 * <code>repeated .ServerName favored_node = 1;</code> 5021 */ 5022 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList()5023 getFavoredNodeOrBuilderList() { 5024 return favoredNode_; 5025 } 5026 /** 5027 * <code>repeated .ServerName favored_node = 1;</code> 5028 */ getFavoredNodeCount()5029 public int getFavoredNodeCount() { 5030 return favoredNode_.size(); 5031 } 5032 /** 5033 * <code>repeated .ServerName favored_node = 1;</code> 5034 */ getFavoredNode(int index)5035 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index) { 5036 return favoredNode_.get(index); 5037 } 5038 /** 5039 * <code>repeated .ServerName favored_node = 1;</code> 5040 */ getFavoredNodeOrBuilder( int index)5041 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( 5042 int index) { 5043 return favoredNode_.get(index); 5044 } 5045 initFields()5046 private void initFields() { 5047 favoredNode_ = java.util.Collections.emptyList(); 5048 } 5049 private byte memoizedIsInitialized = -1; isInitialized()5050 public final boolean isInitialized() { 5051 byte isInitialized = memoizedIsInitialized; 5052 if (isInitialized != -1) return isInitialized == 1; 5053 5054 for (int i = 0; i < getFavoredNodeCount(); i++) { 5055 if (!getFavoredNode(i).isInitialized()) { 5056 memoizedIsInitialized = 0; 5057 return false; 5058 } 5059 } 5060 memoizedIsInitialized = 1; 5061 return true; 5062 } 5063 writeTo(com.google.protobuf.CodedOutputStream output)5064 public void writeTo(com.google.protobuf.CodedOutputStream output) 5065 throws java.io.IOException { 5066 getSerializedSize(); 5067 for (int i = 0; i < favoredNode_.size(); i++) { 5068 output.writeMessage(1, favoredNode_.get(i)); 5069 } 5070 getUnknownFields().writeTo(output); 5071 } 5072 5073 private int memoizedSerializedSize = -1; getSerializedSize()5074 public int getSerializedSize() { 5075 int size = memoizedSerializedSize; 5076 if (size != -1) return size; 5077 5078 size = 0; 5079 for (int i = 0; i < favoredNode_.size(); i++) { 5080 size += com.google.protobuf.CodedOutputStream 5081 .computeMessageSize(1, favoredNode_.get(i)); 5082 } 5083 size += getUnknownFields().getSerializedSize(); 5084 memoizedSerializedSize = size; 5085 return size; 5086 } 5087 5088 private static final long serialVersionUID = 0L; 5089 @java.lang.Override writeReplace()5090 protected java.lang.Object writeReplace() 5091 throws java.io.ObjectStreamException { 5092 return super.writeReplace(); 5093 } 5094 5095 @java.lang.Override equals(final java.lang.Object obj)5096 public boolean equals(final java.lang.Object obj) { 5097 if (obj == this) { 5098 return true; 5099 } 5100 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes)) { 5101 return super.equals(obj); 5102 } 5103 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes) obj; 5104 5105 boolean result = true; 5106 result = result && getFavoredNodeList() 5107 .equals(other.getFavoredNodeList()); 5108 result = result && 5109 getUnknownFields().equals(other.getUnknownFields()); 5110 return result; 5111 } 5112 5113 private int memoizedHashCode = 0; 5114 @java.lang.Override hashCode()5115 public int hashCode() { 5116 if (memoizedHashCode != 0) { 5117 return memoizedHashCode; 5118 } 5119 int hash = 41; 5120 hash = (19 * hash) + getDescriptorForType().hashCode(); 5121 if (getFavoredNodeCount() > 0) { 5122 hash = (37 * hash) + FAVORED_NODE_FIELD_NUMBER; 5123 hash = (53 * hash) + getFavoredNodeList().hashCode(); 5124 } 5125 hash = (29 * hash) + getUnknownFields().hashCode(); 5126 memoizedHashCode = hash; 5127 return hash; 5128 } 5129 parseFrom( com.google.protobuf.ByteString data)5130 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( 5131 com.google.protobuf.ByteString data) 5132 throws com.google.protobuf.InvalidProtocolBufferException { 5133 return PARSER.parseFrom(data); 5134 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5135 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( 5136 com.google.protobuf.ByteString data, 5137 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5138 throws com.google.protobuf.InvalidProtocolBufferException { 5139 return PARSER.parseFrom(data, extensionRegistry); 5140 } parseFrom(byte[] data)5141 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(byte[] data) 5142 throws com.google.protobuf.InvalidProtocolBufferException { 5143 return PARSER.parseFrom(data); 5144 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5145 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( 5146 byte[] data, 5147 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5148 throws com.google.protobuf.InvalidProtocolBufferException { 5149 return PARSER.parseFrom(data, extensionRegistry); 5150 } parseFrom(java.io.InputStream input)5151 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom(java.io.InputStream input) 5152 throws java.io.IOException { 5153 return PARSER.parseFrom(input); 5154 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5155 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( 5156 java.io.InputStream input, 5157 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5158 throws java.io.IOException { 5159 return PARSER.parseFrom(input, extensionRegistry); 5160 } parseDelimitedFrom(java.io.InputStream input)5161 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom(java.io.InputStream input) 5162 throws java.io.IOException { 5163 return PARSER.parseDelimitedFrom(input); 5164 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5165 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseDelimitedFrom( 5166 java.io.InputStream input, 5167 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5168 throws java.io.IOException { 5169 return PARSER.parseDelimitedFrom(input, extensionRegistry); 5170 } parseFrom( com.google.protobuf.CodedInputStream input)5171 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( 5172 com.google.protobuf.CodedInputStream input) 5173 throws java.io.IOException { 5174 return PARSER.parseFrom(input); 5175 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5176 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parseFrom( 5177 com.google.protobuf.CodedInputStream input, 5178 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5179 throws java.io.IOException { 5180 return PARSER.parseFrom(input, extensionRegistry); 5181 } 5182 newBuilder()5183 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()5184 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes prototype)5185 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes prototype) { 5186 return newBuilder().mergeFrom(prototype); 5187 } toBuilder()5188 public Builder toBuilder() { return newBuilder(this); } 5189 5190 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)5191 protected Builder newBuilderForType( 5192 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 5193 Builder builder = new Builder(parent); 5194 return builder; 5195 } 5196 /** 5197 * Protobuf type {@code FavoredNodes} 5198 * 5199 * <pre> 5200 ** 5201 * Protocol buffer for favored nodes 5202 * </pre> 5203 */ 5204 public static final class Builder extends 5205 com.google.protobuf.GeneratedMessage.Builder<Builder> 5206 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodesOrBuilder { 5207 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()5208 getDescriptor() { 5209 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_FavoredNodes_descriptor; 5210 } 5211 5212 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()5213 internalGetFieldAccessorTable() { 5214 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_FavoredNodes_fieldAccessorTable 5215 .ensureFieldAccessorsInitialized( 5216 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.Builder.class); 5217 } 5218 5219 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.newBuilder() Builder()5220 private Builder() { 5221 maybeForceBuilderInitialization(); 5222 } 5223 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)5224 private Builder( 5225 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 5226 super(parent); 5227 maybeForceBuilderInitialization(); 5228 } maybeForceBuilderInitialization()5229 private void maybeForceBuilderInitialization() { 5230 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 5231 getFavoredNodeFieldBuilder(); 5232 } 5233 } create()5234 private static Builder create() { 5235 return new Builder(); 5236 } 5237 clear()5238 public Builder clear() { 5239 super.clear(); 5240 if (favoredNodeBuilder_ == null) { 5241 favoredNode_ = java.util.Collections.emptyList(); 5242 bitField0_ = (bitField0_ & ~0x00000001); 5243 } else { 5244 favoredNodeBuilder_.clear(); 5245 } 5246 return this; 5247 } 5248 clone()5249 public Builder clone() { 5250 return create().mergeFrom(buildPartial()); 5251 } 5252 5253 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()5254 getDescriptorForType() { 5255 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_FavoredNodes_descriptor; 5256 } 5257 getDefaultInstanceForType()5258 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes getDefaultInstanceForType() { 5259 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.getDefaultInstance(); 5260 } 5261 build()5262 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes build() { 5263 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes result = buildPartial(); 5264 if (!result.isInitialized()) { 5265 throw newUninitializedMessageException(result); 5266 } 5267 return result; 5268 } 5269 buildPartial()5270 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes buildPartial() { 5271 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes(this); 5272 int from_bitField0_ = bitField0_; 5273 if (favoredNodeBuilder_ == null) { 5274 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5275 favoredNode_ = java.util.Collections.unmodifiableList(favoredNode_); 5276 bitField0_ = (bitField0_ & ~0x00000001); 5277 } 5278 result.favoredNode_ = favoredNode_; 5279 } else { 5280 result.favoredNode_ = favoredNodeBuilder_.build(); 5281 } 5282 onBuilt(); 5283 return result; 5284 } 5285 mergeFrom(com.google.protobuf.Message other)5286 public Builder mergeFrom(com.google.protobuf.Message other) { 5287 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes) { 5288 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes)other); 5289 } else { 5290 super.mergeFrom(other); 5291 return this; 5292 } 5293 } 5294 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes other)5295 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes other) { 5296 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes.getDefaultInstance()) return this; 5297 if (favoredNodeBuilder_ == null) { 5298 if (!other.favoredNode_.isEmpty()) { 5299 if (favoredNode_.isEmpty()) { 5300 favoredNode_ = other.favoredNode_; 5301 bitField0_ = (bitField0_ & ~0x00000001); 5302 } else { 5303 ensureFavoredNodeIsMutable(); 5304 favoredNode_.addAll(other.favoredNode_); 5305 } 5306 onChanged(); 5307 } 5308 } else { 5309 if (!other.favoredNode_.isEmpty()) { 5310 if (favoredNodeBuilder_.isEmpty()) { 5311 favoredNodeBuilder_.dispose(); 5312 favoredNodeBuilder_ = null; 5313 favoredNode_ = other.favoredNode_; 5314 bitField0_ = (bitField0_ & ~0x00000001); 5315 favoredNodeBuilder_ = 5316 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 5317 getFavoredNodeFieldBuilder() : null; 5318 } else { 5319 favoredNodeBuilder_.addAllMessages(other.favoredNode_); 5320 } 5321 } 5322 } 5323 this.mergeUnknownFields(other.getUnknownFields()); 5324 return this; 5325 } 5326 isInitialized()5327 public final boolean isInitialized() { 5328 for (int i = 0; i < getFavoredNodeCount(); i++) { 5329 if (!getFavoredNode(i).isInitialized()) { 5330 5331 return false; 5332 } 5333 } 5334 return true; 5335 } 5336 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5337 public Builder mergeFrom( 5338 com.google.protobuf.CodedInputStream input, 5339 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5340 throws java.io.IOException { 5341 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes parsedMessage = null; 5342 try { 5343 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 5344 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5345 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.FavoredNodes) e.getUnfinishedMessage(); 5346 throw e; 5347 } finally { 5348 if (parsedMessage != null) { 5349 mergeFrom(parsedMessage); 5350 } 5351 } 5352 return this; 5353 } 5354 private int bitField0_; 5355 5356 // repeated .ServerName favored_node = 1; 5357 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNode_ = 5358 java.util.Collections.emptyList(); ensureFavoredNodeIsMutable()5359 private void ensureFavoredNodeIsMutable() { 5360 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 5361 favoredNode_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(favoredNode_); 5362 bitField0_ |= 0x00000001; 5363 } 5364 } 5365 5366 private com.google.protobuf.RepeatedFieldBuilder< 5367 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodeBuilder_; 5368 5369 /** 5370 * <code>repeated .ServerName favored_node = 1;</code> 5371 */ getFavoredNodeList()5372 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodeList() { 5373 if (favoredNodeBuilder_ == null) { 5374 return java.util.Collections.unmodifiableList(favoredNode_); 5375 } else { 5376 return favoredNodeBuilder_.getMessageList(); 5377 } 5378 } 5379 /** 5380 * <code>repeated .ServerName favored_node = 1;</code> 5381 */ getFavoredNodeCount()5382 public int getFavoredNodeCount() { 5383 if (favoredNodeBuilder_ == null) { 5384 return favoredNode_.size(); 5385 } else { 5386 return favoredNodeBuilder_.getCount(); 5387 } 5388 } 5389 /** 5390 * <code>repeated .ServerName favored_node = 1;</code> 5391 */ getFavoredNode(int index)5392 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNode(int index) { 5393 if (favoredNodeBuilder_ == null) { 5394 return favoredNode_.get(index); 5395 } else { 5396 return favoredNodeBuilder_.getMessage(index); 5397 } 5398 } 5399 /** 5400 * <code>repeated .ServerName favored_node = 1;</code> 5401 */ setFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)5402 public Builder setFavoredNode( 5403 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 5404 if (favoredNodeBuilder_ == null) { 5405 if (value == null) { 5406 throw new NullPointerException(); 5407 } 5408 ensureFavoredNodeIsMutable(); 5409 favoredNode_.set(index, value); 5410 onChanged(); 5411 } else { 5412 favoredNodeBuilder_.setMessage(index, value); 5413 } 5414 return this; 5415 } 5416 /** 5417 * <code>repeated .ServerName favored_node = 1;</code> 5418 */ setFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)5419 public Builder setFavoredNode( 5420 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 5421 if (favoredNodeBuilder_ == null) { 5422 ensureFavoredNodeIsMutable(); 5423 favoredNode_.set(index, builderForValue.build()); 5424 onChanged(); 5425 } else { 5426 favoredNodeBuilder_.setMessage(index, builderForValue.build()); 5427 } 5428 return this; 5429 } 5430 /** 5431 * <code>repeated .ServerName favored_node = 1;</code> 5432 */ addFavoredNode(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)5433 public Builder addFavoredNode(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 5434 if (favoredNodeBuilder_ == null) { 5435 if (value == null) { 5436 throw new NullPointerException(); 5437 } 5438 ensureFavoredNodeIsMutable(); 5439 favoredNode_.add(value); 5440 onChanged(); 5441 } else { 5442 favoredNodeBuilder_.addMessage(value); 5443 } 5444 return this; 5445 } 5446 /** 5447 * <code>repeated .ServerName favored_node = 1;</code> 5448 */ addFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)5449 public Builder addFavoredNode( 5450 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 5451 if (favoredNodeBuilder_ == null) { 5452 if (value == null) { 5453 throw new NullPointerException(); 5454 } 5455 ensureFavoredNodeIsMutable(); 5456 favoredNode_.add(index, value); 5457 onChanged(); 5458 } else { 5459 favoredNodeBuilder_.addMessage(index, value); 5460 } 5461 return this; 5462 } 5463 /** 5464 * <code>repeated .ServerName favored_node = 1;</code> 5465 */ addFavoredNode( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)5466 public Builder addFavoredNode( 5467 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 5468 if (favoredNodeBuilder_ == null) { 5469 ensureFavoredNodeIsMutable(); 5470 favoredNode_.add(builderForValue.build()); 5471 onChanged(); 5472 } else { 5473 favoredNodeBuilder_.addMessage(builderForValue.build()); 5474 } 5475 return this; 5476 } 5477 /** 5478 * <code>repeated .ServerName favored_node = 1;</code> 5479 */ addFavoredNode( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)5480 public Builder addFavoredNode( 5481 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 5482 if (favoredNodeBuilder_ == null) { 5483 ensureFavoredNodeIsMutable(); 5484 favoredNode_.add(index, builderForValue.build()); 5485 onChanged(); 5486 } else { 5487 favoredNodeBuilder_.addMessage(index, builderForValue.build()); 5488 } 5489 return this; 5490 } 5491 /** 5492 * <code>repeated .ServerName favored_node = 1;</code> 5493 */ addAllFavoredNode( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values)5494 public Builder addAllFavoredNode( 5495 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) { 5496 if (favoredNodeBuilder_ == null) { 5497 ensureFavoredNodeIsMutable(); 5498 super.addAll(values, favoredNode_); 5499 onChanged(); 5500 } else { 5501 favoredNodeBuilder_.addAllMessages(values); 5502 } 5503 return this; 5504 } 5505 /** 5506 * <code>repeated .ServerName favored_node = 1;</code> 5507 */ clearFavoredNode()5508 public Builder clearFavoredNode() { 5509 if (favoredNodeBuilder_ == null) { 5510 favoredNode_ = java.util.Collections.emptyList(); 5511 bitField0_ = (bitField0_ & ~0x00000001); 5512 onChanged(); 5513 } else { 5514 favoredNodeBuilder_.clear(); 5515 } 5516 return this; 5517 } 5518 /** 5519 * <code>repeated .ServerName favored_node = 1;</code> 5520 */ removeFavoredNode(int index)5521 public Builder removeFavoredNode(int index) { 5522 if (favoredNodeBuilder_ == null) { 5523 ensureFavoredNodeIsMutable(); 5524 favoredNode_.remove(index); 5525 onChanged(); 5526 } else { 5527 favoredNodeBuilder_.remove(index); 5528 } 5529 return this; 5530 } 5531 /** 5532 * <code>repeated .ServerName favored_node = 1;</code> 5533 */ getFavoredNodeBuilder( int index)5534 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getFavoredNodeBuilder( 5535 int index) { 5536 return getFavoredNodeFieldBuilder().getBuilder(index); 5537 } 5538 /** 5539 * <code>repeated .ServerName favored_node = 1;</code> 5540 */ getFavoredNodeOrBuilder( int index)5541 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodeOrBuilder( 5542 int index) { 5543 if (favoredNodeBuilder_ == null) { 5544 return favoredNode_.get(index); } else { 5545 return favoredNodeBuilder_.getMessageOrBuilder(index); 5546 } 5547 } 5548 /** 5549 * <code>repeated .ServerName favored_node = 1;</code> 5550 */ 5551 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeOrBuilderList()5552 getFavoredNodeOrBuilderList() { 5553 if (favoredNodeBuilder_ != null) { 5554 return favoredNodeBuilder_.getMessageOrBuilderList(); 5555 } else { 5556 return java.util.Collections.unmodifiableList(favoredNode_); 5557 } 5558 } 5559 /** 5560 * <code>repeated .ServerName favored_node = 1;</code> 5561 */ addFavoredNodeBuilder()5562 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodeBuilder() { 5563 return getFavoredNodeFieldBuilder().addBuilder( 5564 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); 5565 } 5566 /** 5567 * <code>repeated .ServerName favored_node = 1;</code> 5568 */ addFavoredNodeBuilder( int index)5569 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodeBuilder( 5570 int index) { 5571 return getFavoredNodeFieldBuilder().addBuilder( 5572 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); 5573 } 5574 /** 5575 * <code>repeated .ServerName favored_node = 1;</code> 5576 */ 5577 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> getFavoredNodeBuilderList()5578 getFavoredNodeBuilderList() { 5579 return getFavoredNodeFieldBuilder().getBuilderList(); 5580 } 5581 private com.google.protobuf.RepeatedFieldBuilder< 5582 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodeFieldBuilder()5583 getFavoredNodeFieldBuilder() { 5584 if (favoredNodeBuilder_ == null) { 5585 favoredNodeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 5586 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( 5587 favoredNode_, 5588 ((bitField0_ & 0x00000001) == 0x00000001), 5589 getParentForChildren(), 5590 isClean()); 5591 favoredNode_ = null; 5592 } 5593 return favoredNodeBuilder_; 5594 } 5595 5596 // @@protoc_insertion_point(builder_scope:FavoredNodes) 5597 } 5598 5599 static { 5600 defaultInstance = new FavoredNodes(true); defaultInstance.initFields()5601 defaultInstance.initFields(); 5602 } 5603 5604 // @@protoc_insertion_point(class_scope:FavoredNodes) 5605 } 5606 5607 public interface RegionSpecifierOrBuilder 5608 extends com.google.protobuf.MessageOrBuilder { 5609 5610 // required .RegionSpecifier.RegionSpecifierType type = 1; 5611 /** 5612 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 5613 */ hasType()5614 boolean hasType(); 5615 /** 5616 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 5617 */ getType()5618 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); 5619 5620 // required bytes value = 2; 5621 /** 5622 * <code>required bytes value = 2;</code> 5623 */ hasValue()5624 boolean hasValue(); 5625 /** 5626 * <code>required bytes value = 2;</code> 5627 */ getValue()5628 com.google.protobuf.ByteString getValue(); 5629 } 5630 /** 5631 * Protobuf type {@code RegionSpecifier} 5632 * 5633 * <pre> 5634 ** 5635 * Container protocol buffer to specify a region. 5636 * You can specify region by region name, or the hash 5637 * of the region name, which is known as encoded 5638 * region name. 5639 * </pre> 5640 */ 5641 public static final class RegionSpecifier extends 5642 com.google.protobuf.GeneratedMessage 5643 implements RegionSpecifierOrBuilder { 5644 // Use RegionSpecifier.newBuilder() to construct. RegionSpecifier(com.google.protobuf.GeneratedMessage.Builder<?> builder)5645 private RegionSpecifier(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 5646 super(builder); 5647 this.unknownFields = builder.getUnknownFields(); 5648 } RegionSpecifier(boolean noInit)5649 private RegionSpecifier(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 5650 5651 private static final RegionSpecifier defaultInstance; getDefaultInstance()5652 public static RegionSpecifier getDefaultInstance() { 5653 return defaultInstance; 5654 } 5655 getDefaultInstanceForType()5656 public RegionSpecifier getDefaultInstanceForType() { 5657 return defaultInstance; 5658 } 5659 5660 private final com.google.protobuf.UnknownFieldSet unknownFields; 5661 @java.lang.Override 5662 public final com.google.protobuf.UnknownFieldSet getUnknownFields()5663 getUnknownFields() { 5664 return this.unknownFields; 5665 } RegionSpecifier( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5666 private RegionSpecifier( 5667 com.google.protobuf.CodedInputStream input, 5668 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5669 throws com.google.protobuf.InvalidProtocolBufferException { 5670 initFields(); 5671 int mutable_bitField0_ = 0; 5672 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 5673 com.google.protobuf.UnknownFieldSet.newBuilder(); 5674 try { 5675 boolean done = false; 5676 while (!done) { 5677 int tag = input.readTag(); 5678 switch (tag) { 5679 case 0: 5680 done = true; 5681 break; 5682 default: { 5683 if (!parseUnknownField(input, unknownFields, 5684 extensionRegistry, tag)) { 5685 done = true; 5686 } 5687 break; 5688 } 5689 case 8: { 5690 int rawValue = input.readEnum(); 5691 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); 5692 if (value == null) { 5693 unknownFields.mergeVarintField(1, rawValue); 5694 } else { 5695 bitField0_ |= 0x00000001; 5696 type_ = value; 5697 } 5698 break; 5699 } 5700 case 18: { 5701 bitField0_ |= 0x00000002; 5702 value_ = input.readBytes(); 5703 break; 5704 } 5705 } 5706 } 5707 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5708 throw e.setUnfinishedMessage(this); 5709 } catch (java.io.IOException e) { 5710 throw new com.google.protobuf.InvalidProtocolBufferException( 5711 e.getMessage()).setUnfinishedMessage(this); 5712 } finally { 5713 this.unknownFields = unknownFields.build(); 5714 makeExtensionsImmutable(); 5715 } 5716 } 5717 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()5718 getDescriptor() { 5719 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; 5720 } 5721 5722 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()5723 internalGetFieldAccessorTable() { 5724 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable 5725 .ensureFieldAccessorsInitialized( 5726 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); 5727 } 5728 5729 public static com.google.protobuf.Parser<RegionSpecifier> PARSER = 5730 new com.google.protobuf.AbstractParser<RegionSpecifier>() { 5731 public RegionSpecifier parsePartialFrom( 5732 com.google.protobuf.CodedInputStream input, 5733 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5734 throws com.google.protobuf.InvalidProtocolBufferException { 5735 return new RegionSpecifier(input, extensionRegistry); 5736 } 5737 }; 5738 5739 @java.lang.Override getParserForType()5740 public com.google.protobuf.Parser<RegionSpecifier> getParserForType() { 5741 return PARSER; 5742 } 5743 5744 /** 5745 * Protobuf enum {@code RegionSpecifier.RegionSpecifierType} 5746 */ 5747 public enum RegionSpecifierType 5748 implements com.google.protobuf.ProtocolMessageEnum { 5749 /** 5750 * <code>REGION_NAME = 1;</code> 5751 * 5752 * <pre> 5753 * <tablename>,<startkey>,<regionId>.<encodedName> 5754 * </pre> 5755 */ 5756 REGION_NAME(0, 1), 5757 /** 5758 * <code>ENCODED_REGION_NAME = 2;</code> 5759 * 5760 * <pre> 5761 * hash of <tablename>,<startkey>,<regionId> 5762 * </pre> 5763 */ 5764 ENCODED_REGION_NAME(1, 2), 5765 ; 5766 5767 /** 5768 * <code>REGION_NAME = 1;</code> 5769 * 5770 * <pre> 5771 * <tablename>,<startkey>,<regionId>.<encodedName> 5772 * </pre> 5773 */ 5774 public static final int REGION_NAME_VALUE = 1; 5775 /** 5776 * <code>ENCODED_REGION_NAME = 2;</code> 5777 * 5778 * <pre> 5779 * hash of <tablename>,<startkey>,<regionId> 5780 * </pre> 5781 */ 5782 public static final int ENCODED_REGION_NAME_VALUE = 2; 5783 5784 getNumber()5785 public final int getNumber() { return value; } 5786 valueOf(int value)5787 public static RegionSpecifierType valueOf(int value) { 5788 switch (value) { 5789 case 1: return REGION_NAME; 5790 case 2: return ENCODED_REGION_NAME; 5791 default: return null; 5792 } 5793 } 5794 5795 public static com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType> internalGetValueMap()5796 internalGetValueMap() { 5797 return internalValueMap; 5798 } 5799 private static com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType> 5800 internalValueMap = 5801 new com.google.protobuf.Internal.EnumLiteMap<RegionSpecifierType>() { 5802 public RegionSpecifierType findValueByNumber(int number) { 5803 return RegionSpecifierType.valueOf(number); 5804 } 5805 }; 5806 5807 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()5808 getValueDescriptor() { 5809 return getDescriptor().getValues().get(index); 5810 } 5811 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()5812 getDescriptorForType() { 5813 return getDescriptor(); 5814 } 5815 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()5816 getDescriptor() { 5817 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); 5818 } 5819 5820 private static final RegionSpecifierType[] VALUES = values(); 5821 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)5822 public static RegionSpecifierType valueOf( 5823 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 5824 if (desc.getType() != getDescriptor()) { 5825 throw new java.lang.IllegalArgumentException( 5826 "EnumValueDescriptor is not for this type."); 5827 } 5828 return VALUES[desc.getIndex()]; 5829 } 5830 5831 private final int index; 5832 private final int value; 5833 RegionSpecifierType(int index, int value)5834 private RegionSpecifierType(int index, int value) { 5835 this.index = index; 5836 this.value = value; 5837 } 5838 5839 // @@protoc_insertion_point(enum_scope:RegionSpecifier.RegionSpecifierType) 5840 } 5841 5842 private int bitField0_; 5843 // required .RegionSpecifier.RegionSpecifierType type = 1; 5844 public static final int TYPE_FIELD_NUMBER = 1; 5845 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; 5846 /** 5847 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 5848 */ hasType()5849 public boolean hasType() { 5850 return ((bitField0_ & 0x00000001) == 0x00000001); 5851 } 5852 /** 5853 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 5854 */ getType()5855 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { 5856 return type_; 5857 } 5858 5859 // required bytes value = 2; 5860 public static final int VALUE_FIELD_NUMBER = 2; 5861 private com.google.protobuf.ByteString value_; 5862 /** 5863 * <code>required bytes value = 2;</code> 5864 */ hasValue()5865 public boolean hasValue() { 5866 return ((bitField0_ & 0x00000002) == 0x00000002); 5867 } 5868 /** 5869 * <code>required bytes value = 2;</code> 5870 */ getValue()5871 public com.google.protobuf.ByteString getValue() { 5872 return value_; 5873 } 5874 initFields()5875 private void initFields() { 5876 type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; 5877 value_ = com.google.protobuf.ByteString.EMPTY; 5878 } 5879 private byte memoizedIsInitialized = -1; isInitialized()5880 public final boolean isInitialized() { 5881 byte isInitialized = memoizedIsInitialized; 5882 if (isInitialized != -1) return isInitialized == 1; 5883 5884 if (!hasType()) { 5885 memoizedIsInitialized = 0; 5886 return false; 5887 } 5888 if (!hasValue()) { 5889 memoizedIsInitialized = 0; 5890 return false; 5891 } 5892 memoizedIsInitialized = 1; 5893 return true; 5894 } 5895 writeTo(com.google.protobuf.CodedOutputStream output)5896 public void writeTo(com.google.protobuf.CodedOutputStream output) 5897 throws java.io.IOException { 5898 getSerializedSize(); 5899 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5900 output.writeEnum(1, type_.getNumber()); 5901 } 5902 if (((bitField0_ & 0x00000002) == 0x00000002)) { 5903 output.writeBytes(2, value_); 5904 } 5905 getUnknownFields().writeTo(output); 5906 } 5907 5908 private int memoizedSerializedSize = -1; getSerializedSize()5909 public int getSerializedSize() { 5910 int size = memoizedSerializedSize; 5911 if (size != -1) return size; 5912 5913 size = 0; 5914 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5915 size += com.google.protobuf.CodedOutputStream 5916 .computeEnumSize(1, type_.getNumber()); 5917 } 5918 if (((bitField0_ & 0x00000002) == 0x00000002)) { 5919 size += com.google.protobuf.CodedOutputStream 5920 .computeBytesSize(2, value_); 5921 } 5922 size += getUnknownFields().getSerializedSize(); 5923 memoizedSerializedSize = size; 5924 return size; 5925 } 5926 5927 private static final long serialVersionUID = 0L; 5928 @java.lang.Override writeReplace()5929 protected java.lang.Object writeReplace() 5930 throws java.io.ObjectStreamException { 5931 return super.writeReplace(); 5932 } 5933 5934 @java.lang.Override equals(final java.lang.Object obj)5935 public boolean equals(final java.lang.Object obj) { 5936 if (obj == this) { 5937 return true; 5938 } 5939 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)) { 5940 return super.equals(obj); 5941 } 5942 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) obj; 5943 5944 boolean result = true; 5945 result = result && (hasType() == other.hasType()); 5946 if (hasType()) { 5947 result = result && 5948 (getType() == other.getType()); 5949 } 5950 result = result && (hasValue() == other.hasValue()); 5951 if (hasValue()) { 5952 result = result && getValue() 5953 .equals(other.getValue()); 5954 } 5955 result = result && 5956 getUnknownFields().equals(other.getUnknownFields()); 5957 return result; 5958 } 5959 5960 private int memoizedHashCode = 0; 5961 @java.lang.Override hashCode()5962 public int hashCode() { 5963 if (memoizedHashCode != 0) { 5964 return memoizedHashCode; 5965 } 5966 int hash = 41; 5967 hash = (19 * hash) + getDescriptorForType().hashCode(); 5968 if (hasType()) { 5969 hash = (37 * hash) + TYPE_FIELD_NUMBER; 5970 hash = (53 * hash) + hashEnum(getType()); 5971 } 5972 if (hasValue()) { 5973 hash = (37 * hash) + VALUE_FIELD_NUMBER; 5974 hash = (53 * hash) + getValue().hashCode(); 5975 } 5976 hash = (29 * hash) + getUnknownFields().hashCode(); 5977 memoizedHashCode = hash; 5978 return hash; 5979 } 5980 parseFrom( com.google.protobuf.ByteString data)5981 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 5982 com.google.protobuf.ByteString data) 5983 throws com.google.protobuf.InvalidProtocolBufferException { 5984 return PARSER.parseFrom(data); 5985 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5986 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 5987 com.google.protobuf.ByteString data, 5988 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5989 throws com.google.protobuf.InvalidProtocolBufferException { 5990 return PARSER.parseFrom(data, extensionRegistry); 5991 } parseFrom(byte[] data)5992 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) 5993 throws com.google.protobuf.InvalidProtocolBufferException { 5994 return PARSER.parseFrom(data); 5995 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5996 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 5997 byte[] data, 5998 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5999 throws com.google.protobuf.InvalidProtocolBufferException { 6000 return PARSER.parseFrom(data, extensionRegistry); 6001 } parseFrom(java.io.InputStream input)6002 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) 6003 throws java.io.IOException { 6004 return PARSER.parseFrom(input); 6005 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6006 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 6007 java.io.InputStream input, 6008 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6009 throws java.io.IOException { 6010 return PARSER.parseFrom(input, extensionRegistry); 6011 } parseDelimitedFrom(java.io.InputStream input)6012 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) 6013 throws java.io.IOException { 6014 return PARSER.parseDelimitedFrom(input); 6015 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6016 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( 6017 java.io.InputStream input, 6018 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6019 throws java.io.IOException { 6020 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6021 } parseFrom( com.google.protobuf.CodedInputStream input)6022 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 6023 com.google.protobuf.CodedInputStream input) 6024 throws java.io.IOException { 6025 return PARSER.parseFrom(input); 6026 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6027 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( 6028 com.google.protobuf.CodedInputStream input, 6029 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6030 throws java.io.IOException { 6031 return PARSER.parseFrom(input, extensionRegistry); 6032 } 6033 newBuilder()6034 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6035 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype)6036 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { 6037 return newBuilder().mergeFrom(prototype); 6038 } toBuilder()6039 public Builder toBuilder() { return newBuilder(this); } 6040 6041 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6042 protected Builder newBuilderForType( 6043 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6044 Builder builder = new Builder(parent); 6045 return builder; 6046 } 6047 /** 6048 * Protobuf type {@code RegionSpecifier} 6049 * 6050 * <pre> 6051 ** 6052 * Container protocol buffer to specify a region. 6053 * You can specify region by region name, or the hash 6054 * of the region name, which is known as encoded 6055 * region name. 6056 * </pre> 6057 */ 6058 public static final class Builder extends 6059 com.google.protobuf.GeneratedMessage.Builder<Builder> 6060 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { 6061 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6062 getDescriptor() { 6063 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; 6064 } 6065 6066 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6067 internalGetFieldAccessorTable() { 6068 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable 6069 .ensureFieldAccessorsInitialized( 6070 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); 6071 } 6072 6073 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() Builder()6074 private Builder() { 6075 maybeForceBuilderInitialization(); 6076 } 6077 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6078 private Builder( 6079 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6080 super(parent); 6081 maybeForceBuilderInitialization(); 6082 } maybeForceBuilderInitialization()6083 private void maybeForceBuilderInitialization() { 6084 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 6085 } 6086 } create()6087 private static Builder create() { 6088 return new Builder(); 6089 } 6090 clear()6091 public Builder clear() { 6092 super.clear(); 6093 type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; 6094 bitField0_ = (bitField0_ & ~0x00000001); 6095 value_ = com.google.protobuf.ByteString.EMPTY; 6096 bitField0_ = (bitField0_ & ~0x00000002); 6097 return this; 6098 } 6099 clone()6100 public Builder clone() { 6101 return create().mergeFrom(buildPartial()); 6102 } 6103 6104 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()6105 getDescriptorForType() { 6106 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; 6107 } 6108 getDefaultInstanceForType()6109 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { 6110 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 6111 } 6112 build()6113 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier build() { 6114 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); 6115 if (!result.isInitialized()) { 6116 throw newUninitializedMessageException(result); 6117 } 6118 return result; 6119 } 6120 buildPartial()6121 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { 6122 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier(this); 6123 int from_bitField0_ = bitField0_; 6124 int to_bitField0_ = 0; 6125 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 6126 to_bitField0_ |= 0x00000001; 6127 } 6128 result.type_ = type_; 6129 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 6130 to_bitField0_ |= 0x00000002; 6131 } 6132 result.value_ = value_; 6133 result.bitField0_ = to_bitField0_; 6134 onBuilt(); 6135 return result; 6136 } 6137 mergeFrom(com.google.protobuf.Message other)6138 public Builder mergeFrom(com.google.protobuf.Message other) { 6139 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) { 6140 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)other); 6141 } else { 6142 super.mergeFrom(other); 6143 return this; 6144 } 6145 } 6146 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other)6147 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other) { 6148 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; 6149 if (other.hasType()) { 6150 setType(other.getType()); 6151 } 6152 if (other.hasValue()) { 6153 setValue(other.getValue()); 6154 } 6155 this.mergeUnknownFields(other.getUnknownFields()); 6156 return this; 6157 } 6158 isInitialized()6159 public final boolean isInitialized() { 6160 if (!hasType()) { 6161 6162 return false; 6163 } 6164 if (!hasValue()) { 6165 6166 return false; 6167 } 6168 return true; 6169 } 6170 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6171 public Builder mergeFrom( 6172 com.google.protobuf.CodedInputStream input, 6173 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6174 throws java.io.IOException { 6175 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parsedMessage = null; 6176 try { 6177 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 6178 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6179 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) e.getUnfinishedMessage(); 6180 throw e; 6181 } finally { 6182 if (parsedMessage != null) { 6183 mergeFrom(parsedMessage); 6184 } 6185 } 6186 return this; 6187 } 6188 private int bitField0_; 6189 6190 // required .RegionSpecifier.RegionSpecifierType type = 1; 6191 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; 6192 /** 6193 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 6194 */ hasType()6195 public boolean hasType() { 6196 return ((bitField0_ & 0x00000001) == 0x00000001); 6197 } 6198 /** 6199 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 6200 */ getType()6201 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { 6202 return type_; 6203 } 6204 /** 6205 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 6206 */ setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value)6207 public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { 6208 if (value == null) { 6209 throw new NullPointerException(); 6210 } 6211 bitField0_ |= 0x00000001; 6212 type_ = value; 6213 onChanged(); 6214 return this; 6215 } 6216 /** 6217 * <code>required .RegionSpecifier.RegionSpecifierType type = 1;</code> 6218 */ clearType()6219 public Builder clearType() { 6220 bitField0_ = (bitField0_ & ~0x00000001); 6221 type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; 6222 onChanged(); 6223 return this; 6224 } 6225 6226 // required bytes value = 2; 6227 private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; 6228 /** 6229 * <code>required bytes value = 2;</code> 6230 */ hasValue()6231 public boolean hasValue() { 6232 return ((bitField0_ & 0x00000002) == 0x00000002); 6233 } 6234 /** 6235 * <code>required bytes value = 2;</code> 6236 */ getValue()6237 public com.google.protobuf.ByteString getValue() { 6238 return value_; 6239 } 6240 /** 6241 * <code>required bytes value = 2;</code> 6242 */ setValue(com.google.protobuf.ByteString value)6243 public Builder setValue(com.google.protobuf.ByteString value) { 6244 if (value == null) { 6245 throw new NullPointerException(); 6246 } 6247 bitField0_ |= 0x00000002; 6248 value_ = value; 6249 onChanged(); 6250 return this; 6251 } 6252 /** 6253 * <code>required bytes value = 2;</code> 6254 */ clearValue()6255 public Builder clearValue() { 6256 bitField0_ = (bitField0_ & ~0x00000002); 6257 value_ = getDefaultInstance().getValue(); 6258 onChanged(); 6259 return this; 6260 } 6261 6262 // @@protoc_insertion_point(builder_scope:RegionSpecifier) 6263 } 6264 6265 static { 6266 defaultInstance = new RegionSpecifier(true); defaultInstance.initFields()6267 defaultInstance.initFields(); 6268 } 6269 6270 // @@protoc_insertion_point(class_scope:RegionSpecifier) 6271 } 6272 6273 public interface TimeRangeOrBuilder 6274 extends com.google.protobuf.MessageOrBuilder { 6275 6276 // optional uint64 from = 1; 6277 /** 6278 * <code>optional uint64 from = 1;</code> 6279 */ hasFrom()6280 boolean hasFrom(); 6281 /** 6282 * <code>optional uint64 from = 1;</code> 6283 */ getFrom()6284 long getFrom(); 6285 6286 // optional uint64 to = 2; 6287 /** 6288 * <code>optional uint64 to = 2;</code> 6289 */ hasTo()6290 boolean hasTo(); 6291 /** 6292 * <code>optional uint64 to = 2;</code> 6293 */ getTo()6294 long getTo(); 6295 } 6296 /** 6297 * Protobuf type {@code TimeRange} 6298 * 6299 * <pre> 6300 ** 6301 * A range of time. Both from and to are Java time 6302 * stamp in milliseconds. If you don't specify a time 6303 * range, it means all time. By default, if not 6304 * specified, from = 0, and to = Long.MAX_VALUE 6305 * </pre> 6306 */ 6307 public static final class TimeRange extends 6308 com.google.protobuf.GeneratedMessage 6309 implements TimeRangeOrBuilder { 6310 // Use TimeRange.newBuilder() to construct. TimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder)6311 private TimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 6312 super(builder); 6313 this.unknownFields = builder.getUnknownFields(); 6314 } TimeRange(boolean noInit)6315 private TimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 6316 6317 private static final TimeRange defaultInstance; getDefaultInstance()6318 public static TimeRange getDefaultInstance() { 6319 return defaultInstance; 6320 } 6321 getDefaultInstanceForType()6322 public TimeRange getDefaultInstanceForType() { 6323 return defaultInstance; 6324 } 6325 6326 private final com.google.protobuf.UnknownFieldSet unknownFields; 6327 @java.lang.Override 6328 public final com.google.protobuf.UnknownFieldSet getUnknownFields()6329 getUnknownFields() { 6330 return this.unknownFields; 6331 } TimeRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6332 private TimeRange( 6333 com.google.protobuf.CodedInputStream input, 6334 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6335 throws com.google.protobuf.InvalidProtocolBufferException { 6336 initFields(); 6337 int mutable_bitField0_ = 0; 6338 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 6339 com.google.protobuf.UnknownFieldSet.newBuilder(); 6340 try { 6341 boolean done = false; 6342 while (!done) { 6343 int tag = input.readTag(); 6344 switch (tag) { 6345 case 0: 6346 done = true; 6347 break; 6348 default: { 6349 if (!parseUnknownField(input, unknownFields, 6350 extensionRegistry, tag)) { 6351 done = true; 6352 } 6353 break; 6354 } 6355 case 8: { 6356 bitField0_ |= 0x00000001; 6357 from_ = input.readUInt64(); 6358 break; 6359 } 6360 case 16: { 6361 bitField0_ |= 0x00000002; 6362 to_ = input.readUInt64(); 6363 break; 6364 } 6365 } 6366 } 6367 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6368 throw e.setUnfinishedMessage(this); 6369 } catch (java.io.IOException e) { 6370 throw new com.google.protobuf.InvalidProtocolBufferException( 6371 e.getMessage()).setUnfinishedMessage(this); 6372 } finally { 6373 this.unknownFields = unknownFields.build(); 6374 makeExtensionsImmutable(); 6375 } 6376 } 6377 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6378 getDescriptor() { 6379 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; 6380 } 6381 6382 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6383 internalGetFieldAccessorTable() { 6384 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable 6385 .ensureFieldAccessorsInitialized( 6386 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); 6387 } 6388 6389 public static com.google.protobuf.Parser<TimeRange> PARSER = 6390 new com.google.protobuf.AbstractParser<TimeRange>() { 6391 public TimeRange parsePartialFrom( 6392 com.google.protobuf.CodedInputStream input, 6393 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6394 throws com.google.protobuf.InvalidProtocolBufferException { 6395 return new TimeRange(input, extensionRegistry); 6396 } 6397 }; 6398 6399 @java.lang.Override getParserForType()6400 public com.google.protobuf.Parser<TimeRange> getParserForType() { 6401 return PARSER; 6402 } 6403 6404 private int bitField0_; 6405 // optional uint64 from = 1; 6406 public static final int FROM_FIELD_NUMBER = 1; 6407 private long from_; 6408 /** 6409 * <code>optional uint64 from = 1;</code> 6410 */ hasFrom()6411 public boolean hasFrom() { 6412 return ((bitField0_ & 0x00000001) == 0x00000001); 6413 } 6414 /** 6415 * <code>optional uint64 from = 1;</code> 6416 */ getFrom()6417 public long getFrom() { 6418 return from_; 6419 } 6420 6421 // optional uint64 to = 2; 6422 public static final int TO_FIELD_NUMBER = 2; 6423 private long to_; 6424 /** 6425 * <code>optional uint64 to = 2;</code> 6426 */ hasTo()6427 public boolean hasTo() { 6428 return ((bitField0_ & 0x00000002) == 0x00000002); 6429 } 6430 /** 6431 * <code>optional uint64 to = 2;</code> 6432 */ getTo()6433 public long getTo() { 6434 return to_; 6435 } 6436 initFields()6437 private void initFields() { 6438 from_ = 0L; 6439 to_ = 0L; 6440 } 6441 private byte memoizedIsInitialized = -1; isInitialized()6442 public final boolean isInitialized() { 6443 byte isInitialized = memoizedIsInitialized; 6444 if (isInitialized != -1) return isInitialized == 1; 6445 6446 memoizedIsInitialized = 1; 6447 return true; 6448 } 6449 writeTo(com.google.protobuf.CodedOutputStream output)6450 public void writeTo(com.google.protobuf.CodedOutputStream output) 6451 throws java.io.IOException { 6452 getSerializedSize(); 6453 if (((bitField0_ & 0x00000001) == 0x00000001)) { 6454 output.writeUInt64(1, from_); 6455 } 6456 if (((bitField0_ & 0x00000002) == 0x00000002)) { 6457 output.writeUInt64(2, to_); 6458 } 6459 getUnknownFields().writeTo(output); 6460 } 6461 6462 private int memoizedSerializedSize = -1; getSerializedSize()6463 public int getSerializedSize() { 6464 int size = memoizedSerializedSize; 6465 if (size != -1) return size; 6466 6467 size = 0; 6468 if (((bitField0_ & 0x00000001) == 0x00000001)) { 6469 size += com.google.protobuf.CodedOutputStream 6470 .computeUInt64Size(1, from_); 6471 } 6472 if (((bitField0_ & 0x00000002) == 0x00000002)) { 6473 size += com.google.protobuf.CodedOutputStream 6474 .computeUInt64Size(2, to_); 6475 } 6476 size += getUnknownFields().getSerializedSize(); 6477 memoizedSerializedSize = size; 6478 return size; 6479 } 6480 6481 private static final long serialVersionUID = 0L; 6482 @java.lang.Override writeReplace()6483 protected java.lang.Object writeReplace() 6484 throws java.io.ObjectStreamException { 6485 return super.writeReplace(); 6486 } 6487 6488 @java.lang.Override equals(final java.lang.Object obj)6489 public boolean equals(final java.lang.Object obj) { 6490 if (obj == this) { 6491 return true; 6492 } 6493 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)) { 6494 return super.equals(obj); 6495 } 6496 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) obj; 6497 6498 boolean result = true; 6499 result = result && (hasFrom() == other.hasFrom()); 6500 if (hasFrom()) { 6501 result = result && (getFrom() 6502 == other.getFrom()); 6503 } 6504 result = result && (hasTo() == other.hasTo()); 6505 if (hasTo()) { 6506 result = result && (getTo() 6507 == other.getTo()); 6508 } 6509 result = result && 6510 getUnknownFields().equals(other.getUnknownFields()); 6511 return result; 6512 } 6513 6514 private int memoizedHashCode = 0; 6515 @java.lang.Override hashCode()6516 public int hashCode() { 6517 if (memoizedHashCode != 0) { 6518 return memoizedHashCode; 6519 } 6520 int hash = 41; 6521 hash = (19 * hash) + getDescriptorForType().hashCode(); 6522 if (hasFrom()) { 6523 hash = (37 * hash) + FROM_FIELD_NUMBER; 6524 hash = (53 * hash) + hashLong(getFrom()); 6525 } 6526 if (hasTo()) { 6527 hash = (37 * hash) + TO_FIELD_NUMBER; 6528 hash = (53 * hash) + hashLong(getTo()); 6529 } 6530 hash = (29 * hash) + getUnknownFields().hashCode(); 6531 memoizedHashCode = hash; 6532 return hash; 6533 } 6534 parseFrom( com.google.protobuf.ByteString data)6535 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( 6536 com.google.protobuf.ByteString data) 6537 throws com.google.protobuf.InvalidProtocolBufferException { 6538 return PARSER.parseFrom(data); 6539 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6540 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( 6541 com.google.protobuf.ByteString data, 6542 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6543 throws com.google.protobuf.InvalidProtocolBufferException { 6544 return PARSER.parseFrom(data, extensionRegistry); 6545 } parseFrom(byte[] data)6546 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data) 6547 throws com.google.protobuf.InvalidProtocolBufferException { 6548 return PARSER.parseFrom(data); 6549 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6550 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( 6551 byte[] data, 6552 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6553 throws com.google.protobuf.InvalidProtocolBufferException { 6554 return PARSER.parseFrom(data, extensionRegistry); 6555 } parseFrom(java.io.InputStream input)6556 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) 6557 throws java.io.IOException { 6558 return PARSER.parseFrom(input); 6559 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6560 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( 6561 java.io.InputStream input, 6562 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6563 throws java.io.IOException { 6564 return PARSER.parseFrom(input, extensionRegistry); 6565 } parseDelimitedFrom(java.io.InputStream input)6566 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) 6567 throws java.io.IOException { 6568 return PARSER.parseDelimitedFrom(input); 6569 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6570 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( 6571 java.io.InputStream input, 6572 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6573 throws java.io.IOException { 6574 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6575 } parseFrom( com.google.protobuf.CodedInputStream input)6576 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( 6577 com.google.protobuf.CodedInputStream input) 6578 throws java.io.IOException { 6579 return PARSER.parseFrom(input); 6580 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6581 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( 6582 com.google.protobuf.CodedInputStream input, 6583 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6584 throws java.io.IOException { 6585 return PARSER.parseFrom(input, extensionRegistry); 6586 } 6587 newBuilder()6588 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6589 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype)6590 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype) { 6591 return newBuilder().mergeFrom(prototype); 6592 } toBuilder()6593 public Builder toBuilder() { return newBuilder(this); } 6594 6595 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6596 protected Builder newBuilderForType( 6597 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6598 Builder builder = new Builder(parent); 6599 return builder; 6600 } 6601 /** 6602 * Protobuf type {@code TimeRange} 6603 * 6604 * <pre> 6605 ** 6606 * A range of time. Both from and to are Java time 6607 * stamp in milliseconds. If you don't specify a time 6608 * range, it means all time. By default, if not 6609 * specified, from = 0, and to = Long.MAX_VALUE 6610 * </pre> 6611 */ 6612 public static final class Builder extends 6613 com.google.protobuf.GeneratedMessage.Builder<Builder> 6614 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { 6615 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6616 getDescriptor() { 6617 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; 6618 } 6619 6620 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6621 internalGetFieldAccessorTable() { 6622 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable 6623 .ensureFieldAccessorsInitialized( 6624 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); 6625 } 6626 6627 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder() Builder()6628 private Builder() { 6629 maybeForceBuilderInitialization(); 6630 } 6631 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6632 private Builder( 6633 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6634 super(parent); 6635 maybeForceBuilderInitialization(); 6636 } maybeForceBuilderInitialization()6637 private void maybeForceBuilderInitialization() { 6638 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 6639 } 6640 } create()6641 private static Builder create() { 6642 return new Builder(); 6643 } 6644 clear()6645 public Builder clear() { 6646 super.clear(); 6647 from_ = 0L; 6648 bitField0_ = (bitField0_ & ~0x00000001); 6649 to_ = 0L; 6650 bitField0_ = (bitField0_ & ~0x00000002); 6651 return this; 6652 } 6653 clone()6654 public Builder clone() { 6655 return create().mergeFrom(buildPartial()); 6656 } 6657 6658 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()6659 getDescriptorForType() { 6660 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; 6661 } 6662 getDefaultInstanceForType()6663 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { 6664 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 6665 } 6666 build()6667 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange build() { 6668 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); 6669 if (!result.isInitialized()) { 6670 throw newUninitializedMessageException(result); 6671 } 6672 return result; 6673 } 6674 buildPartial()6675 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildPartial() { 6676 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange(this); 6677 int from_bitField0_ = bitField0_; 6678 int to_bitField0_ = 0; 6679 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 6680 to_bitField0_ |= 0x00000001; 6681 } 6682 result.from_ = from_; 6683 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 6684 to_bitField0_ |= 0x00000002; 6685 } 6686 result.to_ = to_; 6687 result.bitField0_ = to_bitField0_; 6688 onBuilt(); 6689 return result; 6690 } 6691 mergeFrom(com.google.protobuf.Message other)6692 public Builder mergeFrom(com.google.protobuf.Message other) { 6693 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) { 6694 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)other); 6695 } else { 6696 super.mergeFrom(other); 6697 return this; 6698 } 6699 } 6700 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other)6701 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other) { 6702 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this; 6703 if (other.hasFrom()) { 6704 setFrom(other.getFrom()); 6705 } 6706 if (other.hasTo()) { 6707 setTo(other.getTo()); 6708 } 6709 this.mergeUnknownFields(other.getUnknownFields()); 6710 return this; 6711 } 6712 isInitialized()6713 public final boolean isInitialized() { 6714 return true; 6715 } 6716 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6717 public Builder mergeFrom( 6718 com.google.protobuf.CodedInputStream input, 6719 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6720 throws java.io.IOException { 6721 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parsedMessage = null; 6722 try { 6723 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 6724 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6725 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) e.getUnfinishedMessage(); 6726 throw e; 6727 } finally { 6728 if (parsedMessage != null) { 6729 mergeFrom(parsedMessage); 6730 } 6731 } 6732 return this; 6733 } 6734 private int bitField0_; 6735 6736 // optional uint64 from = 1; 6737 private long from_ ; 6738 /** 6739 * <code>optional uint64 from = 1;</code> 6740 */ hasFrom()6741 public boolean hasFrom() { 6742 return ((bitField0_ & 0x00000001) == 0x00000001); 6743 } 6744 /** 6745 * <code>optional uint64 from = 1;</code> 6746 */ getFrom()6747 public long getFrom() { 6748 return from_; 6749 } 6750 /** 6751 * <code>optional uint64 from = 1;</code> 6752 */ setFrom(long value)6753 public Builder setFrom(long value) { 6754 bitField0_ |= 0x00000001; 6755 from_ = value; 6756 onChanged(); 6757 return this; 6758 } 6759 /** 6760 * <code>optional uint64 from = 1;</code> 6761 */ clearFrom()6762 public Builder clearFrom() { 6763 bitField0_ = (bitField0_ & ~0x00000001); 6764 from_ = 0L; 6765 onChanged(); 6766 return this; 6767 } 6768 6769 // optional uint64 to = 2; 6770 private long to_ ; 6771 /** 6772 * <code>optional uint64 to = 2;</code> 6773 */ hasTo()6774 public boolean hasTo() { 6775 return ((bitField0_ & 0x00000002) == 0x00000002); 6776 } 6777 /** 6778 * <code>optional uint64 to = 2;</code> 6779 */ getTo()6780 public long getTo() { 6781 return to_; 6782 } 6783 /** 6784 * <code>optional uint64 to = 2;</code> 6785 */ setTo(long value)6786 public Builder setTo(long value) { 6787 bitField0_ |= 0x00000002; 6788 to_ = value; 6789 onChanged(); 6790 return this; 6791 } 6792 /** 6793 * <code>optional uint64 to = 2;</code> 6794 */ clearTo()6795 public Builder clearTo() { 6796 bitField0_ = (bitField0_ & ~0x00000002); 6797 to_ = 0L; 6798 onChanged(); 6799 return this; 6800 } 6801 6802 // @@protoc_insertion_point(builder_scope:TimeRange) 6803 } 6804 6805 static { 6806 defaultInstance = new TimeRange(true); defaultInstance.initFields()6807 defaultInstance.initFields(); 6808 } 6809 6810 // @@protoc_insertion_point(class_scope:TimeRange) 6811 } 6812 6813 public interface ColumnFamilyTimeRangeOrBuilder 6814 extends com.google.protobuf.MessageOrBuilder { 6815 6816 // required bytes column_family = 1; 6817 /** 6818 * <code>required bytes column_family = 1;</code> 6819 */ hasColumnFamily()6820 boolean hasColumnFamily(); 6821 /** 6822 * <code>required bytes column_family = 1;</code> 6823 */ getColumnFamily()6824 com.google.protobuf.ByteString getColumnFamily(); 6825 6826 // required .TimeRange time_range = 2; 6827 /** 6828 * <code>required .TimeRange time_range = 2;</code> 6829 */ hasTimeRange()6830 boolean hasTimeRange(); 6831 /** 6832 * <code>required .TimeRange time_range = 2;</code> 6833 */ getTimeRange()6834 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); 6835 /** 6836 * <code>required .TimeRange time_range = 2;</code> 6837 */ getTimeRangeOrBuilder()6838 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); 6839 } 6840 /** 6841 * Protobuf type {@code ColumnFamilyTimeRange} 6842 * 6843 * <pre> 6844 * ColumnFamily Specific TimeRange 6845 * </pre> 6846 */ 6847 public static final class ColumnFamilyTimeRange extends 6848 com.google.protobuf.GeneratedMessage 6849 implements ColumnFamilyTimeRangeOrBuilder { 6850 // Use ColumnFamilyTimeRange.newBuilder() to construct. ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder)6851 private ColumnFamilyTimeRange(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 6852 super(builder); 6853 this.unknownFields = builder.getUnknownFields(); 6854 } ColumnFamilyTimeRange(boolean noInit)6855 private ColumnFamilyTimeRange(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 6856 6857 private static final ColumnFamilyTimeRange defaultInstance; getDefaultInstance()6858 public static ColumnFamilyTimeRange getDefaultInstance() { 6859 return defaultInstance; 6860 } 6861 getDefaultInstanceForType()6862 public ColumnFamilyTimeRange getDefaultInstanceForType() { 6863 return defaultInstance; 6864 } 6865 6866 private final com.google.protobuf.UnknownFieldSet unknownFields; 6867 @java.lang.Override 6868 public final com.google.protobuf.UnknownFieldSet getUnknownFields()6869 getUnknownFields() { 6870 return this.unknownFields; 6871 } ColumnFamilyTimeRange( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6872 private ColumnFamilyTimeRange( 6873 com.google.protobuf.CodedInputStream input, 6874 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6875 throws com.google.protobuf.InvalidProtocolBufferException { 6876 initFields(); 6877 int mutable_bitField0_ = 0; 6878 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 6879 com.google.protobuf.UnknownFieldSet.newBuilder(); 6880 try { 6881 boolean done = false; 6882 while (!done) { 6883 int tag = input.readTag(); 6884 switch (tag) { 6885 case 0: 6886 done = true; 6887 break; 6888 default: { 6889 if (!parseUnknownField(input, unknownFields, 6890 extensionRegistry, tag)) { 6891 done = true; 6892 } 6893 break; 6894 } 6895 case 10: { 6896 bitField0_ |= 0x00000001; 6897 columnFamily_ = input.readBytes(); 6898 break; 6899 } 6900 case 18: { 6901 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = null; 6902 if (((bitField0_ & 0x00000002) == 0x00000002)) { 6903 subBuilder = timeRange_.toBuilder(); 6904 } 6905 timeRange_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.PARSER, extensionRegistry); 6906 if (subBuilder != null) { 6907 subBuilder.mergeFrom(timeRange_); 6908 timeRange_ = subBuilder.buildPartial(); 6909 } 6910 bitField0_ |= 0x00000002; 6911 break; 6912 } 6913 } 6914 } 6915 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6916 throw e.setUnfinishedMessage(this); 6917 } catch (java.io.IOException e) { 6918 throw new com.google.protobuf.InvalidProtocolBufferException( 6919 e.getMessage()).setUnfinishedMessage(this); 6920 } finally { 6921 this.unknownFields = unknownFields.build(); 6922 makeExtensionsImmutable(); 6923 } 6924 } 6925 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6926 getDescriptor() { 6927 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_descriptor; 6928 } 6929 6930 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6931 internalGetFieldAccessorTable() { 6932 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_fieldAccessorTable 6933 .ensureFieldAccessorsInitialized( 6934 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); 6935 } 6936 6937 public static com.google.protobuf.Parser<ColumnFamilyTimeRange> PARSER = 6938 new com.google.protobuf.AbstractParser<ColumnFamilyTimeRange>() { 6939 public ColumnFamilyTimeRange parsePartialFrom( 6940 com.google.protobuf.CodedInputStream input, 6941 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6942 throws com.google.protobuf.InvalidProtocolBufferException { 6943 return new ColumnFamilyTimeRange(input, extensionRegistry); 6944 } 6945 }; 6946 6947 @java.lang.Override getParserForType()6948 public com.google.protobuf.Parser<ColumnFamilyTimeRange> getParserForType() { 6949 return PARSER; 6950 } 6951 6952 private int bitField0_; 6953 // required bytes column_family = 1; 6954 public static final int COLUMN_FAMILY_FIELD_NUMBER = 1; 6955 private com.google.protobuf.ByteString columnFamily_; 6956 /** 6957 * <code>required bytes column_family = 1;</code> 6958 */ hasColumnFamily()6959 public boolean hasColumnFamily() { 6960 return ((bitField0_ & 0x00000001) == 0x00000001); 6961 } 6962 /** 6963 * <code>required bytes column_family = 1;</code> 6964 */ getColumnFamily()6965 public com.google.protobuf.ByteString getColumnFamily() { 6966 return columnFamily_; 6967 } 6968 6969 // required .TimeRange time_range = 2; 6970 public static final int TIME_RANGE_FIELD_NUMBER = 2; 6971 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; 6972 /** 6973 * <code>required .TimeRange time_range = 2;</code> 6974 */ hasTimeRange()6975 public boolean hasTimeRange() { 6976 return ((bitField0_ & 0x00000002) == 0x00000002); 6977 } 6978 /** 6979 * <code>required .TimeRange time_range = 2;</code> 6980 */ getTimeRange()6981 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 6982 return timeRange_; 6983 } 6984 /** 6985 * <code>required .TimeRange time_range = 2;</code> 6986 */ getTimeRangeOrBuilder()6987 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 6988 return timeRange_; 6989 } 6990 initFields()6991 private void initFields() { 6992 columnFamily_ = com.google.protobuf.ByteString.EMPTY; 6993 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 6994 } 6995 private byte memoizedIsInitialized = -1; isInitialized()6996 public final boolean isInitialized() { 6997 byte isInitialized = memoizedIsInitialized; 6998 if (isInitialized != -1) return isInitialized == 1; 6999 7000 if (!hasColumnFamily()) { 7001 memoizedIsInitialized = 0; 7002 return false; 7003 } 7004 if (!hasTimeRange()) { 7005 memoizedIsInitialized = 0; 7006 return false; 7007 } 7008 memoizedIsInitialized = 1; 7009 return true; 7010 } 7011 writeTo(com.google.protobuf.CodedOutputStream output)7012 public void writeTo(com.google.protobuf.CodedOutputStream output) 7013 throws java.io.IOException { 7014 getSerializedSize(); 7015 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7016 output.writeBytes(1, columnFamily_); 7017 } 7018 if (((bitField0_ & 0x00000002) == 0x00000002)) { 7019 output.writeMessage(2, timeRange_); 7020 } 7021 getUnknownFields().writeTo(output); 7022 } 7023 7024 private int memoizedSerializedSize = -1; getSerializedSize()7025 public int getSerializedSize() { 7026 int size = memoizedSerializedSize; 7027 if (size != -1) return size; 7028 7029 size = 0; 7030 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7031 size += com.google.protobuf.CodedOutputStream 7032 .computeBytesSize(1, columnFamily_); 7033 } 7034 if (((bitField0_ & 0x00000002) == 0x00000002)) { 7035 size += com.google.protobuf.CodedOutputStream 7036 .computeMessageSize(2, timeRange_); 7037 } 7038 size += getUnknownFields().getSerializedSize(); 7039 memoizedSerializedSize = size; 7040 return size; 7041 } 7042 7043 private static final long serialVersionUID = 0L; 7044 @java.lang.Override writeReplace()7045 protected java.lang.Object writeReplace() 7046 throws java.io.ObjectStreamException { 7047 return super.writeReplace(); 7048 } 7049 7050 @java.lang.Override equals(final java.lang.Object obj)7051 public boolean equals(final java.lang.Object obj) { 7052 if (obj == this) { 7053 return true; 7054 } 7055 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)) { 7056 return super.equals(obj); 7057 } 7058 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) obj; 7059 7060 boolean result = true; 7061 result = result && (hasColumnFamily() == other.hasColumnFamily()); 7062 if (hasColumnFamily()) { 7063 result = result && getColumnFamily() 7064 .equals(other.getColumnFamily()); 7065 } 7066 result = result && (hasTimeRange() == other.hasTimeRange()); 7067 if (hasTimeRange()) { 7068 result = result && getTimeRange() 7069 .equals(other.getTimeRange()); 7070 } 7071 result = result && 7072 getUnknownFields().equals(other.getUnknownFields()); 7073 return result; 7074 } 7075 7076 private int memoizedHashCode = 0; 7077 @java.lang.Override hashCode()7078 public int hashCode() { 7079 if (memoizedHashCode != 0) { 7080 return memoizedHashCode; 7081 } 7082 int hash = 41; 7083 hash = (19 * hash) + getDescriptorForType().hashCode(); 7084 if (hasColumnFamily()) { 7085 hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER; 7086 hash = (53 * hash) + getColumnFamily().hashCode(); 7087 } 7088 if (hasTimeRange()) { 7089 hash = (37 * hash) + TIME_RANGE_FIELD_NUMBER; 7090 hash = (53 * hash) + getTimeRange().hashCode(); 7091 } 7092 hash = (29 * hash) + getUnknownFields().hashCode(); 7093 memoizedHashCode = hash; 7094 return hash; 7095 } 7096 parseFrom( com.google.protobuf.ByteString data)7097 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( 7098 com.google.protobuf.ByteString data) 7099 throws com.google.protobuf.InvalidProtocolBufferException { 7100 return PARSER.parseFrom(data); 7101 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7102 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( 7103 com.google.protobuf.ByteString data, 7104 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7105 throws com.google.protobuf.InvalidProtocolBufferException { 7106 return PARSER.parseFrom(data, extensionRegistry); 7107 } parseFrom(byte[] data)7108 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(byte[] data) 7109 throws com.google.protobuf.InvalidProtocolBufferException { 7110 return PARSER.parseFrom(data); 7111 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7112 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( 7113 byte[] data, 7114 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7115 throws com.google.protobuf.InvalidProtocolBufferException { 7116 return PARSER.parseFrom(data, extensionRegistry); 7117 } parseFrom(java.io.InputStream input)7118 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom(java.io.InputStream input) 7119 throws java.io.IOException { 7120 return PARSER.parseFrom(input); 7121 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7122 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( 7123 java.io.InputStream input, 7124 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7125 throws java.io.IOException { 7126 return PARSER.parseFrom(input, extensionRegistry); 7127 } parseDelimitedFrom(java.io.InputStream input)7128 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom(java.io.InputStream input) 7129 throws java.io.IOException { 7130 return PARSER.parseDelimitedFrom(input); 7131 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7132 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseDelimitedFrom( 7133 java.io.InputStream input, 7134 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7135 throws java.io.IOException { 7136 return PARSER.parseDelimitedFrom(input, extensionRegistry); 7137 } parseFrom( com.google.protobuf.CodedInputStream input)7138 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( 7139 com.google.protobuf.CodedInputStream input) 7140 throws java.io.IOException { 7141 return PARSER.parseFrom(input); 7142 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7143 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parseFrom( 7144 com.google.protobuf.CodedInputStream input, 7145 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7146 throws java.io.IOException { 7147 return PARSER.parseFrom(input, extensionRegistry); 7148 } 7149 newBuilder()7150 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()7151 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange prototype)7152 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange prototype) { 7153 return newBuilder().mergeFrom(prototype); 7154 } toBuilder()7155 public Builder toBuilder() { return newBuilder(this); } 7156 7157 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7158 protected Builder newBuilderForType( 7159 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7160 Builder builder = new Builder(parent); 7161 return builder; 7162 } 7163 /** 7164 * Protobuf type {@code ColumnFamilyTimeRange} 7165 * 7166 * <pre> 7167 * ColumnFamily Specific TimeRange 7168 * </pre> 7169 */ 7170 public static final class Builder extends 7171 com.google.protobuf.GeneratedMessage.Builder<Builder> 7172 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRangeOrBuilder { 7173 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7174 getDescriptor() { 7175 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_descriptor; 7176 } 7177 7178 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7179 internalGetFieldAccessorTable() { 7180 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_fieldAccessorTable 7181 .ensureFieldAccessorsInitialized( 7182 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.Builder.class); 7183 } 7184 7185 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.newBuilder() Builder()7186 private Builder() { 7187 maybeForceBuilderInitialization(); 7188 } 7189 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7190 private Builder( 7191 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7192 super(parent); 7193 maybeForceBuilderInitialization(); 7194 } maybeForceBuilderInitialization()7195 private void maybeForceBuilderInitialization() { 7196 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 7197 getTimeRangeFieldBuilder(); 7198 } 7199 } create()7200 private static Builder create() { 7201 return new Builder(); 7202 } 7203 clear()7204 public Builder clear() { 7205 super.clear(); 7206 columnFamily_ = com.google.protobuf.ByteString.EMPTY; 7207 bitField0_ = (bitField0_ & ~0x00000001); 7208 if (timeRangeBuilder_ == null) { 7209 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 7210 } else { 7211 timeRangeBuilder_.clear(); 7212 } 7213 bitField0_ = (bitField0_ & ~0x00000002); 7214 return this; 7215 } 7216 clone()7217 public Builder clone() { 7218 return create().mergeFrom(buildPartial()); 7219 } 7220 7221 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()7222 getDescriptorForType() { 7223 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ColumnFamilyTimeRange_descriptor; 7224 } 7225 getDefaultInstanceForType()7226 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange getDefaultInstanceForType() { 7227 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance(); 7228 } 7229 build()7230 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange build() { 7231 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = buildPartial(); 7232 if (!result.isInitialized()) { 7233 throw newUninitializedMessageException(result); 7234 } 7235 return result; 7236 } 7237 buildPartial()7238 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange buildPartial() { 7239 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange(this); 7240 int from_bitField0_ = bitField0_; 7241 int to_bitField0_ = 0; 7242 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 7243 to_bitField0_ |= 0x00000001; 7244 } 7245 result.columnFamily_ = columnFamily_; 7246 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 7247 to_bitField0_ |= 0x00000002; 7248 } 7249 if (timeRangeBuilder_ == null) { 7250 result.timeRange_ = timeRange_; 7251 } else { 7252 result.timeRange_ = timeRangeBuilder_.build(); 7253 } 7254 result.bitField0_ = to_bitField0_; 7255 onBuilt(); 7256 return result; 7257 } 7258 mergeFrom(com.google.protobuf.Message other)7259 public Builder mergeFrom(com.google.protobuf.Message other) { 7260 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) { 7261 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange)other); 7262 } else { 7263 super.mergeFrom(other); 7264 return this; 7265 } 7266 } 7267 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other)7268 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange other) { 7269 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange.getDefaultInstance()) return this; 7270 if (other.hasColumnFamily()) { 7271 setColumnFamily(other.getColumnFamily()); 7272 } 7273 if (other.hasTimeRange()) { 7274 mergeTimeRange(other.getTimeRange()); 7275 } 7276 this.mergeUnknownFields(other.getUnknownFields()); 7277 return this; 7278 } 7279 isInitialized()7280 public final boolean isInitialized() { 7281 if (!hasColumnFamily()) { 7282 7283 return false; 7284 } 7285 if (!hasTimeRange()) { 7286 7287 return false; 7288 } 7289 return true; 7290 } 7291 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7292 public Builder mergeFrom( 7293 com.google.protobuf.CodedInputStream input, 7294 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7295 throws java.io.IOException { 7296 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange parsedMessage = null; 7297 try { 7298 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 7299 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7300 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilyTimeRange) e.getUnfinishedMessage(); 7301 throw e; 7302 } finally { 7303 if (parsedMessage != null) { 7304 mergeFrom(parsedMessage); 7305 } 7306 } 7307 return this; 7308 } 7309 private int bitField0_; 7310 7311 // required bytes column_family = 1; 7312 private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; 7313 /** 7314 * <code>required bytes column_family = 1;</code> 7315 */ hasColumnFamily()7316 public boolean hasColumnFamily() { 7317 return ((bitField0_ & 0x00000001) == 0x00000001); 7318 } 7319 /** 7320 * <code>required bytes column_family = 1;</code> 7321 */ getColumnFamily()7322 public com.google.protobuf.ByteString getColumnFamily() { 7323 return columnFamily_; 7324 } 7325 /** 7326 * <code>required bytes column_family = 1;</code> 7327 */ setColumnFamily(com.google.protobuf.ByteString value)7328 public Builder setColumnFamily(com.google.protobuf.ByteString value) { 7329 if (value == null) { 7330 throw new NullPointerException(); 7331 } 7332 bitField0_ |= 0x00000001; 7333 columnFamily_ = value; 7334 onChanged(); 7335 return this; 7336 } 7337 /** 7338 * <code>required bytes column_family = 1;</code> 7339 */ clearColumnFamily()7340 public Builder clearColumnFamily() { 7341 bitField0_ = (bitField0_ & ~0x00000001); 7342 columnFamily_ = getDefaultInstance().getColumnFamily(); 7343 onChanged(); 7344 return this; 7345 } 7346 7347 // required .TimeRange time_range = 2; 7348 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 7349 private com.google.protobuf.SingleFieldBuilder< 7350 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; 7351 /** 7352 * <code>required .TimeRange time_range = 2;</code> 7353 */ hasTimeRange()7354 public boolean hasTimeRange() { 7355 return ((bitField0_ & 0x00000002) == 0x00000002); 7356 } 7357 /** 7358 * <code>required .TimeRange time_range = 2;</code> 7359 */ getTimeRange()7360 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { 7361 if (timeRangeBuilder_ == null) { 7362 return timeRange_; 7363 } else { 7364 return timeRangeBuilder_.getMessage(); 7365 } 7366 } 7367 /** 7368 * <code>required .TimeRange time_range = 2;</code> 7369 */ setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)7370 public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 7371 if (timeRangeBuilder_ == null) { 7372 if (value == null) { 7373 throw new NullPointerException(); 7374 } 7375 timeRange_ = value; 7376 onChanged(); 7377 } else { 7378 timeRangeBuilder_.setMessage(value); 7379 } 7380 bitField0_ |= 0x00000002; 7381 return this; 7382 } 7383 /** 7384 * <code>required .TimeRange time_range = 2;</code> 7385 */ setTimeRange( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue)7386 public Builder setTimeRange( 7387 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { 7388 if (timeRangeBuilder_ == null) { 7389 timeRange_ = builderForValue.build(); 7390 onChanged(); 7391 } else { 7392 timeRangeBuilder_.setMessage(builderForValue.build()); 7393 } 7394 bitField0_ |= 0x00000002; 7395 return this; 7396 } 7397 /** 7398 * <code>required .TimeRange time_range = 2;</code> 7399 */ mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value)7400 public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { 7401 if (timeRangeBuilder_ == null) { 7402 if (((bitField0_ & 0x00000002) == 0x00000002) && 7403 timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { 7404 timeRange_ = 7405 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); 7406 } else { 7407 timeRange_ = value; 7408 } 7409 onChanged(); 7410 } else { 7411 timeRangeBuilder_.mergeFrom(value); 7412 } 7413 bitField0_ |= 0x00000002; 7414 return this; 7415 } 7416 /** 7417 * <code>required .TimeRange time_range = 2;</code> 7418 */ clearTimeRange()7419 public Builder clearTimeRange() { 7420 if (timeRangeBuilder_ == null) { 7421 timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); 7422 onChanged(); 7423 } else { 7424 timeRangeBuilder_.clear(); 7425 } 7426 bitField0_ = (bitField0_ & ~0x00000002); 7427 return this; 7428 } 7429 /** 7430 * <code>required .TimeRange time_range = 2;</code> 7431 */ getTimeRangeBuilder()7432 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { 7433 bitField0_ |= 0x00000002; 7434 onChanged(); 7435 return getTimeRangeFieldBuilder().getBuilder(); 7436 } 7437 /** 7438 * <code>required .TimeRange time_range = 2;</code> 7439 */ getTimeRangeOrBuilder()7440 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { 7441 if (timeRangeBuilder_ != null) { 7442 return timeRangeBuilder_.getMessageOrBuilder(); 7443 } else { 7444 return timeRange_; 7445 } 7446 } 7447 /** 7448 * <code>required .TimeRange time_range = 2;</code> 7449 */ 7450 private com.google.protobuf.SingleFieldBuilder< 7451 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> getTimeRangeFieldBuilder()7452 getTimeRangeFieldBuilder() { 7453 if (timeRangeBuilder_ == null) { 7454 timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< 7455 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( 7456 timeRange_, 7457 getParentForChildren(), 7458 isClean()); 7459 timeRange_ = null; 7460 } 7461 return timeRangeBuilder_; 7462 } 7463 7464 // @@protoc_insertion_point(builder_scope:ColumnFamilyTimeRange) 7465 } 7466 7467 static { 7468 defaultInstance = new ColumnFamilyTimeRange(true); defaultInstance.initFields()7469 defaultInstance.initFields(); 7470 } 7471 7472 // @@protoc_insertion_point(class_scope:ColumnFamilyTimeRange) 7473 } 7474 7475 public interface ServerNameOrBuilder 7476 extends com.google.protobuf.MessageOrBuilder { 7477 7478 // required string host_name = 1; 7479 /** 7480 * <code>required string host_name = 1;</code> 7481 */ hasHostName()7482 boolean hasHostName(); 7483 /** 7484 * <code>required string host_name = 1;</code> 7485 */ getHostName()7486 java.lang.String getHostName(); 7487 /** 7488 * <code>required string host_name = 1;</code> 7489 */ 7490 com.google.protobuf.ByteString getHostNameBytes()7491 getHostNameBytes(); 7492 7493 // optional uint32 port = 2; 7494 /** 7495 * <code>optional uint32 port = 2;</code> 7496 */ hasPort()7497 boolean hasPort(); 7498 /** 7499 * <code>optional uint32 port = 2;</code> 7500 */ getPort()7501 int getPort(); 7502 7503 // optional uint64 start_code = 3; 7504 /** 7505 * <code>optional uint64 start_code = 3;</code> 7506 */ hasStartCode()7507 boolean hasStartCode(); 7508 /** 7509 * <code>optional uint64 start_code = 3;</code> 7510 */ getStartCode()7511 long getStartCode(); 7512 } 7513 /** 7514 * Protobuf type {@code ServerName} 7515 * 7516 * <pre> 7517 ** 7518 * Protocol buffer version of ServerName 7519 * </pre> 7520 */ 7521 public static final class ServerName extends 7522 com.google.protobuf.GeneratedMessage 7523 implements ServerNameOrBuilder { 7524 // Use ServerName.newBuilder() to construct. ServerName(com.google.protobuf.GeneratedMessage.Builder<?> builder)7525 private ServerName(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 7526 super(builder); 7527 this.unknownFields = builder.getUnknownFields(); 7528 } ServerName(boolean noInit)7529 private ServerName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 7530 7531 private static final ServerName defaultInstance; getDefaultInstance()7532 public static ServerName getDefaultInstance() { 7533 return defaultInstance; 7534 } 7535 getDefaultInstanceForType()7536 public ServerName getDefaultInstanceForType() { 7537 return defaultInstance; 7538 } 7539 7540 private final com.google.protobuf.UnknownFieldSet unknownFields; 7541 @java.lang.Override 7542 public final com.google.protobuf.UnknownFieldSet getUnknownFields()7543 getUnknownFields() { 7544 return this.unknownFields; 7545 } ServerName( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7546 private ServerName( 7547 com.google.protobuf.CodedInputStream input, 7548 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7549 throws com.google.protobuf.InvalidProtocolBufferException { 7550 initFields(); 7551 int mutable_bitField0_ = 0; 7552 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 7553 com.google.protobuf.UnknownFieldSet.newBuilder(); 7554 try { 7555 boolean done = false; 7556 while (!done) { 7557 int tag = input.readTag(); 7558 switch (tag) { 7559 case 0: 7560 done = true; 7561 break; 7562 default: { 7563 if (!parseUnknownField(input, unknownFields, 7564 extensionRegistry, tag)) { 7565 done = true; 7566 } 7567 break; 7568 } 7569 case 10: { 7570 bitField0_ |= 0x00000001; 7571 hostName_ = input.readBytes(); 7572 break; 7573 } 7574 case 16: { 7575 bitField0_ |= 0x00000002; 7576 port_ = input.readUInt32(); 7577 break; 7578 } 7579 case 24: { 7580 bitField0_ |= 0x00000004; 7581 startCode_ = input.readUInt64(); 7582 break; 7583 } 7584 } 7585 } 7586 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7587 throw e.setUnfinishedMessage(this); 7588 } catch (java.io.IOException e) { 7589 throw new com.google.protobuf.InvalidProtocolBufferException( 7590 e.getMessage()).setUnfinishedMessage(this); 7591 } finally { 7592 this.unknownFields = unknownFields.build(); 7593 makeExtensionsImmutable(); 7594 } 7595 } 7596 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7597 getDescriptor() { 7598 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; 7599 } 7600 7601 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7602 internalGetFieldAccessorTable() { 7603 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable 7604 .ensureFieldAccessorsInitialized( 7605 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); 7606 } 7607 7608 public static com.google.protobuf.Parser<ServerName> PARSER = 7609 new com.google.protobuf.AbstractParser<ServerName>() { 7610 public ServerName parsePartialFrom( 7611 com.google.protobuf.CodedInputStream input, 7612 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7613 throws com.google.protobuf.InvalidProtocolBufferException { 7614 return new ServerName(input, extensionRegistry); 7615 } 7616 }; 7617 7618 @java.lang.Override getParserForType()7619 public com.google.protobuf.Parser<ServerName> getParserForType() { 7620 return PARSER; 7621 } 7622 7623 private int bitField0_; 7624 // required string host_name = 1; 7625 public static final int HOST_NAME_FIELD_NUMBER = 1; 7626 private java.lang.Object hostName_; 7627 /** 7628 * <code>required string host_name = 1;</code> 7629 */ hasHostName()7630 public boolean hasHostName() { 7631 return ((bitField0_ & 0x00000001) == 0x00000001); 7632 } 7633 /** 7634 * <code>required string host_name = 1;</code> 7635 */ getHostName()7636 public java.lang.String getHostName() { 7637 java.lang.Object ref = hostName_; 7638 if (ref instanceof java.lang.String) { 7639 return (java.lang.String) ref; 7640 } else { 7641 com.google.protobuf.ByteString bs = 7642 (com.google.protobuf.ByteString) ref; 7643 java.lang.String s = bs.toStringUtf8(); 7644 if (bs.isValidUtf8()) { 7645 hostName_ = s; 7646 } 7647 return s; 7648 } 7649 } 7650 /** 7651 * <code>required string host_name = 1;</code> 7652 */ 7653 public com.google.protobuf.ByteString getHostNameBytes()7654 getHostNameBytes() { 7655 java.lang.Object ref = hostName_; 7656 if (ref instanceof java.lang.String) { 7657 com.google.protobuf.ByteString b = 7658 com.google.protobuf.ByteString.copyFromUtf8( 7659 (java.lang.String) ref); 7660 hostName_ = b; 7661 return b; 7662 } else { 7663 return (com.google.protobuf.ByteString) ref; 7664 } 7665 } 7666 7667 // optional uint32 port = 2; 7668 public static final int PORT_FIELD_NUMBER = 2; 7669 private int port_; 7670 /** 7671 * <code>optional uint32 port = 2;</code> 7672 */ hasPort()7673 public boolean hasPort() { 7674 return ((bitField0_ & 0x00000002) == 0x00000002); 7675 } 7676 /** 7677 * <code>optional uint32 port = 2;</code> 7678 */ getPort()7679 public int getPort() { 7680 return port_; 7681 } 7682 7683 // optional uint64 start_code = 3; 7684 public static final int START_CODE_FIELD_NUMBER = 3; 7685 private long startCode_; 7686 /** 7687 * <code>optional uint64 start_code = 3;</code> 7688 */ hasStartCode()7689 public boolean hasStartCode() { 7690 return ((bitField0_ & 0x00000004) == 0x00000004); 7691 } 7692 /** 7693 * <code>optional uint64 start_code = 3;</code> 7694 */ getStartCode()7695 public long getStartCode() { 7696 return startCode_; 7697 } 7698 initFields()7699 private void initFields() { 7700 hostName_ = ""; 7701 port_ = 0; 7702 startCode_ = 0L; 7703 } 7704 private byte memoizedIsInitialized = -1; isInitialized()7705 public final boolean isInitialized() { 7706 byte isInitialized = memoizedIsInitialized; 7707 if (isInitialized != -1) return isInitialized == 1; 7708 7709 if (!hasHostName()) { 7710 memoizedIsInitialized = 0; 7711 return false; 7712 } 7713 memoizedIsInitialized = 1; 7714 return true; 7715 } 7716 writeTo(com.google.protobuf.CodedOutputStream output)7717 public void writeTo(com.google.protobuf.CodedOutputStream output) 7718 throws java.io.IOException { 7719 getSerializedSize(); 7720 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7721 output.writeBytes(1, getHostNameBytes()); 7722 } 7723 if (((bitField0_ & 0x00000002) == 0x00000002)) { 7724 output.writeUInt32(2, port_); 7725 } 7726 if (((bitField0_ & 0x00000004) == 0x00000004)) { 7727 output.writeUInt64(3, startCode_); 7728 } 7729 getUnknownFields().writeTo(output); 7730 } 7731 7732 private int memoizedSerializedSize = -1; getSerializedSize()7733 public int getSerializedSize() { 7734 int size = memoizedSerializedSize; 7735 if (size != -1) return size; 7736 7737 size = 0; 7738 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7739 size += com.google.protobuf.CodedOutputStream 7740 .computeBytesSize(1, getHostNameBytes()); 7741 } 7742 if (((bitField0_ & 0x00000002) == 0x00000002)) { 7743 size += com.google.protobuf.CodedOutputStream 7744 .computeUInt32Size(2, port_); 7745 } 7746 if (((bitField0_ & 0x00000004) == 0x00000004)) { 7747 size += com.google.protobuf.CodedOutputStream 7748 .computeUInt64Size(3, startCode_); 7749 } 7750 size += getUnknownFields().getSerializedSize(); 7751 memoizedSerializedSize = size; 7752 return size; 7753 } 7754 7755 private static final long serialVersionUID = 0L; 7756 @java.lang.Override writeReplace()7757 protected java.lang.Object writeReplace() 7758 throws java.io.ObjectStreamException { 7759 return super.writeReplace(); 7760 } 7761 7762 @java.lang.Override equals(final java.lang.Object obj)7763 public boolean equals(final java.lang.Object obj) { 7764 if (obj == this) { 7765 return true; 7766 } 7767 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)) { 7768 return super.equals(obj); 7769 } 7770 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) obj; 7771 7772 boolean result = true; 7773 result = result && (hasHostName() == other.hasHostName()); 7774 if (hasHostName()) { 7775 result = result && getHostName() 7776 .equals(other.getHostName()); 7777 } 7778 result = result && (hasPort() == other.hasPort()); 7779 if (hasPort()) { 7780 result = result && (getPort() 7781 == other.getPort()); 7782 } 7783 result = result && (hasStartCode() == other.hasStartCode()); 7784 if (hasStartCode()) { 7785 result = result && (getStartCode() 7786 == other.getStartCode()); 7787 } 7788 result = result && 7789 getUnknownFields().equals(other.getUnknownFields()); 7790 return result; 7791 } 7792 7793 private int memoizedHashCode = 0; 7794 @java.lang.Override hashCode()7795 public int hashCode() { 7796 if (memoizedHashCode != 0) { 7797 return memoizedHashCode; 7798 } 7799 int hash = 41; 7800 hash = (19 * hash) + getDescriptorForType().hashCode(); 7801 if (hasHostName()) { 7802 hash = (37 * hash) + HOST_NAME_FIELD_NUMBER; 7803 hash = (53 * hash) + getHostName().hashCode(); 7804 } 7805 if (hasPort()) { 7806 hash = (37 * hash) + PORT_FIELD_NUMBER; 7807 hash = (53 * hash) + getPort(); 7808 } 7809 if (hasStartCode()) { 7810 hash = (37 * hash) + START_CODE_FIELD_NUMBER; 7811 hash = (53 * hash) + hashLong(getStartCode()); 7812 } 7813 hash = (29 * hash) + getUnknownFields().hashCode(); 7814 memoizedHashCode = hash; 7815 return hash; 7816 } 7817 parseFrom( com.google.protobuf.ByteString data)7818 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( 7819 com.google.protobuf.ByteString data) 7820 throws com.google.protobuf.InvalidProtocolBufferException { 7821 return PARSER.parseFrom(data); 7822 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7823 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( 7824 com.google.protobuf.ByteString data, 7825 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7826 throws com.google.protobuf.InvalidProtocolBufferException { 7827 return PARSER.parseFrom(data, extensionRegistry); 7828 } parseFrom(byte[] data)7829 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) 7830 throws com.google.protobuf.InvalidProtocolBufferException { 7831 return PARSER.parseFrom(data); 7832 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7833 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( 7834 byte[] data, 7835 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7836 throws com.google.protobuf.InvalidProtocolBufferException { 7837 return PARSER.parseFrom(data, extensionRegistry); 7838 } parseFrom(java.io.InputStream input)7839 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) 7840 throws java.io.IOException { 7841 return PARSER.parseFrom(input); 7842 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7843 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( 7844 java.io.InputStream input, 7845 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7846 throws java.io.IOException { 7847 return PARSER.parseFrom(input, extensionRegistry); 7848 } parseDelimitedFrom(java.io.InputStream input)7849 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) 7850 throws java.io.IOException { 7851 return PARSER.parseDelimitedFrom(input); 7852 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7853 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( 7854 java.io.InputStream input, 7855 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7856 throws java.io.IOException { 7857 return PARSER.parseDelimitedFrom(input, extensionRegistry); 7858 } parseFrom( com.google.protobuf.CodedInputStream input)7859 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( 7860 com.google.protobuf.CodedInputStream input) 7861 throws java.io.IOException { 7862 return PARSER.parseFrom(input); 7863 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7864 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( 7865 com.google.protobuf.CodedInputStream input, 7866 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7867 throws java.io.IOException { 7868 return PARSER.parseFrom(input, extensionRegistry); 7869 } 7870 newBuilder()7871 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()7872 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype)7873 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype) { 7874 return newBuilder().mergeFrom(prototype); 7875 } toBuilder()7876 public Builder toBuilder() { return newBuilder(this); } 7877 7878 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7879 protected Builder newBuilderForType( 7880 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7881 Builder builder = new Builder(parent); 7882 return builder; 7883 } 7884 /** 7885 * Protobuf type {@code ServerName} 7886 * 7887 * <pre> 7888 ** 7889 * Protocol buffer version of ServerName 7890 * </pre> 7891 */ 7892 public static final class Builder extends 7893 com.google.protobuf.GeneratedMessage.Builder<Builder> 7894 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder { 7895 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7896 getDescriptor() { 7897 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; 7898 } 7899 7900 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7901 internalGetFieldAccessorTable() { 7902 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable 7903 .ensureFieldAccessorsInitialized( 7904 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); 7905 } 7906 7907 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder() Builder()7908 private Builder() { 7909 maybeForceBuilderInitialization(); 7910 } 7911 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7912 private Builder( 7913 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7914 super(parent); 7915 maybeForceBuilderInitialization(); 7916 } maybeForceBuilderInitialization()7917 private void maybeForceBuilderInitialization() { 7918 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 7919 } 7920 } create()7921 private static Builder create() { 7922 return new Builder(); 7923 } 7924 clear()7925 public Builder clear() { 7926 super.clear(); 7927 hostName_ = ""; 7928 bitField0_ = (bitField0_ & ~0x00000001); 7929 port_ = 0; 7930 bitField0_ = (bitField0_ & ~0x00000002); 7931 startCode_ = 0L; 7932 bitField0_ = (bitField0_ & ~0x00000004); 7933 return this; 7934 } 7935 clone()7936 public Builder clone() { 7937 return create().mergeFrom(buildPartial()); 7938 } 7939 7940 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()7941 getDescriptorForType() { 7942 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; 7943 } 7944 getDefaultInstanceForType()7945 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { 7946 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); 7947 } 7948 build()7949 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName build() { 7950 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); 7951 if (!result.isInitialized()) { 7952 throw newUninitializedMessageException(result); 7953 } 7954 return result; 7955 } 7956 buildPartial()7957 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildPartial() { 7958 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName(this); 7959 int from_bitField0_ = bitField0_; 7960 int to_bitField0_ = 0; 7961 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 7962 to_bitField0_ |= 0x00000001; 7963 } 7964 result.hostName_ = hostName_; 7965 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 7966 to_bitField0_ |= 0x00000002; 7967 } 7968 result.port_ = port_; 7969 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 7970 to_bitField0_ |= 0x00000004; 7971 } 7972 result.startCode_ = startCode_; 7973 result.bitField0_ = to_bitField0_; 7974 onBuilt(); 7975 return result; 7976 } 7977 mergeFrom(com.google.protobuf.Message other)7978 public Builder mergeFrom(com.google.protobuf.Message other) { 7979 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) { 7980 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)other); 7981 } else { 7982 super.mergeFrom(other); 7983 return this; 7984 } 7985 } 7986 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other)7987 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other) { 7988 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; 7989 if (other.hasHostName()) { 7990 bitField0_ |= 0x00000001; 7991 hostName_ = other.hostName_; 7992 onChanged(); 7993 } 7994 if (other.hasPort()) { 7995 setPort(other.getPort()); 7996 } 7997 if (other.hasStartCode()) { 7998 setStartCode(other.getStartCode()); 7999 } 8000 this.mergeUnknownFields(other.getUnknownFields()); 8001 return this; 8002 } 8003 isInitialized()8004 public final boolean isInitialized() { 8005 if (!hasHostName()) { 8006 8007 return false; 8008 } 8009 return true; 8010 } 8011 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8012 public Builder mergeFrom( 8013 com.google.protobuf.CodedInputStream input, 8014 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8015 throws java.io.IOException { 8016 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parsedMessage = null; 8017 try { 8018 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 8019 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8020 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) e.getUnfinishedMessage(); 8021 throw e; 8022 } finally { 8023 if (parsedMessage != null) { 8024 mergeFrom(parsedMessage); 8025 } 8026 } 8027 return this; 8028 } 8029 private int bitField0_; 8030 8031 // required string host_name = 1; 8032 private java.lang.Object hostName_ = ""; 8033 /** 8034 * <code>required string host_name = 1;</code> 8035 */ hasHostName()8036 public boolean hasHostName() { 8037 return ((bitField0_ & 0x00000001) == 0x00000001); 8038 } 8039 /** 8040 * <code>required string host_name = 1;</code> 8041 */ getHostName()8042 public java.lang.String getHostName() { 8043 java.lang.Object ref = hostName_; 8044 if (!(ref instanceof java.lang.String)) { 8045 java.lang.String s = ((com.google.protobuf.ByteString) ref) 8046 .toStringUtf8(); 8047 hostName_ = s; 8048 return s; 8049 } else { 8050 return (java.lang.String) ref; 8051 } 8052 } 8053 /** 8054 * <code>required string host_name = 1;</code> 8055 */ 8056 public com.google.protobuf.ByteString getHostNameBytes()8057 getHostNameBytes() { 8058 java.lang.Object ref = hostName_; 8059 if (ref instanceof String) { 8060 com.google.protobuf.ByteString b = 8061 com.google.protobuf.ByteString.copyFromUtf8( 8062 (java.lang.String) ref); 8063 hostName_ = b; 8064 return b; 8065 } else { 8066 return (com.google.protobuf.ByteString) ref; 8067 } 8068 } 8069 /** 8070 * <code>required string host_name = 1;</code> 8071 */ setHostName( java.lang.String value)8072 public Builder setHostName( 8073 java.lang.String value) { 8074 if (value == null) { 8075 throw new NullPointerException(); 8076 } 8077 bitField0_ |= 0x00000001; 8078 hostName_ = value; 8079 onChanged(); 8080 return this; 8081 } 8082 /** 8083 * <code>required string host_name = 1;</code> 8084 */ clearHostName()8085 public Builder clearHostName() { 8086 bitField0_ = (bitField0_ & ~0x00000001); 8087 hostName_ = getDefaultInstance().getHostName(); 8088 onChanged(); 8089 return this; 8090 } 8091 /** 8092 * <code>required string host_name = 1;</code> 8093 */ setHostNameBytes( com.google.protobuf.ByteString value)8094 public Builder setHostNameBytes( 8095 com.google.protobuf.ByteString value) { 8096 if (value == null) { 8097 throw new NullPointerException(); 8098 } 8099 bitField0_ |= 0x00000001; 8100 hostName_ = value; 8101 onChanged(); 8102 return this; 8103 } 8104 8105 // optional uint32 port = 2; 8106 private int port_ ; 8107 /** 8108 * <code>optional uint32 port = 2;</code> 8109 */ hasPort()8110 public boolean hasPort() { 8111 return ((bitField0_ & 0x00000002) == 0x00000002); 8112 } 8113 /** 8114 * <code>optional uint32 port = 2;</code> 8115 */ getPort()8116 public int getPort() { 8117 return port_; 8118 } 8119 /** 8120 * <code>optional uint32 port = 2;</code> 8121 */ setPort(int value)8122 public Builder setPort(int value) { 8123 bitField0_ |= 0x00000002; 8124 port_ = value; 8125 onChanged(); 8126 return this; 8127 } 8128 /** 8129 * <code>optional uint32 port = 2;</code> 8130 */ clearPort()8131 public Builder clearPort() { 8132 bitField0_ = (bitField0_ & ~0x00000002); 8133 port_ = 0; 8134 onChanged(); 8135 return this; 8136 } 8137 8138 // optional uint64 start_code = 3; 8139 private long startCode_ ; 8140 /** 8141 * <code>optional uint64 start_code = 3;</code> 8142 */ hasStartCode()8143 public boolean hasStartCode() { 8144 return ((bitField0_ & 0x00000004) == 0x00000004); 8145 } 8146 /** 8147 * <code>optional uint64 start_code = 3;</code> 8148 */ getStartCode()8149 public long getStartCode() { 8150 return startCode_; 8151 } 8152 /** 8153 * <code>optional uint64 start_code = 3;</code> 8154 */ setStartCode(long value)8155 public Builder setStartCode(long value) { 8156 bitField0_ |= 0x00000004; 8157 startCode_ = value; 8158 onChanged(); 8159 return this; 8160 } 8161 /** 8162 * <code>optional uint64 start_code = 3;</code> 8163 */ clearStartCode()8164 public Builder clearStartCode() { 8165 bitField0_ = (bitField0_ & ~0x00000004); 8166 startCode_ = 0L; 8167 onChanged(); 8168 return this; 8169 } 8170 8171 // @@protoc_insertion_point(builder_scope:ServerName) 8172 } 8173 8174 static { 8175 defaultInstance = new ServerName(true); defaultInstance.initFields()8176 defaultInstance.initFields(); 8177 } 8178 8179 // @@protoc_insertion_point(class_scope:ServerName) 8180 } 8181 8182 public interface CoprocessorOrBuilder 8183 extends com.google.protobuf.MessageOrBuilder { 8184 8185 // required string name = 1; 8186 /** 8187 * <code>required string name = 1;</code> 8188 */ hasName()8189 boolean hasName(); 8190 /** 8191 * <code>required string name = 1;</code> 8192 */ getName()8193 java.lang.String getName(); 8194 /** 8195 * <code>required string name = 1;</code> 8196 */ 8197 com.google.protobuf.ByteString getNameBytes()8198 getNameBytes(); 8199 } 8200 /** 8201 * Protobuf type {@code Coprocessor} 8202 */ 8203 public static final class Coprocessor extends 8204 com.google.protobuf.GeneratedMessage 8205 implements CoprocessorOrBuilder { 8206 // Use Coprocessor.newBuilder() to construct. Coprocessor(com.google.protobuf.GeneratedMessage.Builder<?> builder)8207 private Coprocessor(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8208 super(builder); 8209 this.unknownFields = builder.getUnknownFields(); 8210 } Coprocessor(boolean noInit)8211 private Coprocessor(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8212 8213 private static final Coprocessor defaultInstance; getDefaultInstance()8214 public static Coprocessor getDefaultInstance() { 8215 return defaultInstance; 8216 } 8217 getDefaultInstanceForType()8218 public Coprocessor getDefaultInstanceForType() { 8219 return defaultInstance; 8220 } 8221 8222 private final com.google.protobuf.UnknownFieldSet unknownFields; 8223 @java.lang.Override 8224 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8225 getUnknownFields() { 8226 return this.unknownFields; 8227 } Coprocessor( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8228 private Coprocessor( 8229 com.google.protobuf.CodedInputStream input, 8230 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8231 throws com.google.protobuf.InvalidProtocolBufferException { 8232 initFields(); 8233 int mutable_bitField0_ = 0; 8234 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8235 com.google.protobuf.UnknownFieldSet.newBuilder(); 8236 try { 8237 boolean done = false; 8238 while (!done) { 8239 int tag = input.readTag(); 8240 switch (tag) { 8241 case 0: 8242 done = true; 8243 break; 8244 default: { 8245 if (!parseUnknownField(input, unknownFields, 8246 extensionRegistry, tag)) { 8247 done = true; 8248 } 8249 break; 8250 } 8251 case 10: { 8252 bitField0_ |= 0x00000001; 8253 name_ = input.readBytes(); 8254 break; 8255 } 8256 } 8257 } 8258 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8259 throw e.setUnfinishedMessage(this); 8260 } catch (java.io.IOException e) { 8261 throw new com.google.protobuf.InvalidProtocolBufferException( 8262 e.getMessage()).setUnfinishedMessage(this); 8263 } finally { 8264 this.unknownFields = unknownFields.build(); 8265 makeExtensionsImmutable(); 8266 } 8267 } 8268 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8269 getDescriptor() { 8270 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; 8271 } 8272 8273 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8274 internalGetFieldAccessorTable() { 8275 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable 8276 .ensureFieldAccessorsInitialized( 8277 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); 8278 } 8279 8280 public static com.google.protobuf.Parser<Coprocessor> PARSER = 8281 new com.google.protobuf.AbstractParser<Coprocessor>() { 8282 public Coprocessor parsePartialFrom( 8283 com.google.protobuf.CodedInputStream input, 8284 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8285 throws com.google.protobuf.InvalidProtocolBufferException { 8286 return new Coprocessor(input, extensionRegistry); 8287 } 8288 }; 8289 8290 @java.lang.Override getParserForType()8291 public com.google.protobuf.Parser<Coprocessor> getParserForType() { 8292 return PARSER; 8293 } 8294 8295 private int bitField0_; 8296 // required string name = 1; 8297 public static final int NAME_FIELD_NUMBER = 1; 8298 private java.lang.Object name_; 8299 /** 8300 * <code>required string name = 1;</code> 8301 */ hasName()8302 public boolean hasName() { 8303 return ((bitField0_ & 0x00000001) == 0x00000001); 8304 } 8305 /** 8306 * <code>required string name = 1;</code> 8307 */ getName()8308 public java.lang.String getName() { 8309 java.lang.Object ref = name_; 8310 if (ref instanceof java.lang.String) { 8311 return (java.lang.String) ref; 8312 } else { 8313 com.google.protobuf.ByteString bs = 8314 (com.google.protobuf.ByteString) ref; 8315 java.lang.String s = bs.toStringUtf8(); 8316 if (bs.isValidUtf8()) { 8317 name_ = s; 8318 } 8319 return s; 8320 } 8321 } 8322 /** 8323 * <code>required string name = 1;</code> 8324 */ 8325 public com.google.protobuf.ByteString getNameBytes()8326 getNameBytes() { 8327 java.lang.Object ref = name_; 8328 if (ref instanceof java.lang.String) { 8329 com.google.protobuf.ByteString b = 8330 com.google.protobuf.ByteString.copyFromUtf8( 8331 (java.lang.String) ref); 8332 name_ = b; 8333 return b; 8334 } else { 8335 return (com.google.protobuf.ByteString) ref; 8336 } 8337 } 8338 initFields()8339 private void initFields() { 8340 name_ = ""; 8341 } 8342 private byte memoizedIsInitialized = -1; isInitialized()8343 public final boolean isInitialized() { 8344 byte isInitialized = memoizedIsInitialized; 8345 if (isInitialized != -1) return isInitialized == 1; 8346 8347 if (!hasName()) { 8348 memoizedIsInitialized = 0; 8349 return false; 8350 } 8351 memoizedIsInitialized = 1; 8352 return true; 8353 } 8354 writeTo(com.google.protobuf.CodedOutputStream output)8355 public void writeTo(com.google.protobuf.CodedOutputStream output) 8356 throws java.io.IOException { 8357 getSerializedSize(); 8358 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8359 output.writeBytes(1, getNameBytes()); 8360 } 8361 getUnknownFields().writeTo(output); 8362 } 8363 8364 private int memoizedSerializedSize = -1; getSerializedSize()8365 public int getSerializedSize() { 8366 int size = memoizedSerializedSize; 8367 if (size != -1) return size; 8368 8369 size = 0; 8370 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8371 size += com.google.protobuf.CodedOutputStream 8372 .computeBytesSize(1, getNameBytes()); 8373 } 8374 size += getUnknownFields().getSerializedSize(); 8375 memoizedSerializedSize = size; 8376 return size; 8377 } 8378 8379 private static final long serialVersionUID = 0L; 8380 @java.lang.Override writeReplace()8381 protected java.lang.Object writeReplace() 8382 throws java.io.ObjectStreamException { 8383 return super.writeReplace(); 8384 } 8385 8386 @java.lang.Override equals(final java.lang.Object obj)8387 public boolean equals(final java.lang.Object obj) { 8388 if (obj == this) { 8389 return true; 8390 } 8391 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)) { 8392 return super.equals(obj); 8393 } 8394 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) obj; 8395 8396 boolean result = true; 8397 result = result && (hasName() == other.hasName()); 8398 if (hasName()) { 8399 result = result && getName() 8400 .equals(other.getName()); 8401 } 8402 result = result && 8403 getUnknownFields().equals(other.getUnknownFields()); 8404 return result; 8405 } 8406 8407 private int memoizedHashCode = 0; 8408 @java.lang.Override hashCode()8409 public int hashCode() { 8410 if (memoizedHashCode != 0) { 8411 return memoizedHashCode; 8412 } 8413 int hash = 41; 8414 hash = (19 * hash) + getDescriptorForType().hashCode(); 8415 if (hasName()) { 8416 hash = (37 * hash) + NAME_FIELD_NUMBER; 8417 hash = (53 * hash) + getName().hashCode(); 8418 } 8419 hash = (29 * hash) + getUnknownFields().hashCode(); 8420 memoizedHashCode = hash; 8421 return hash; 8422 } 8423 parseFrom( com.google.protobuf.ByteString data)8424 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 8425 com.google.protobuf.ByteString data) 8426 throws com.google.protobuf.InvalidProtocolBufferException { 8427 return PARSER.parseFrom(data); 8428 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8429 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 8430 com.google.protobuf.ByteString data, 8431 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8432 throws com.google.protobuf.InvalidProtocolBufferException { 8433 return PARSER.parseFrom(data, extensionRegistry); 8434 } parseFrom(byte[] data)8435 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(byte[] data) 8436 throws com.google.protobuf.InvalidProtocolBufferException { 8437 return PARSER.parseFrom(data); 8438 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8439 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 8440 byte[] data, 8441 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8442 throws com.google.protobuf.InvalidProtocolBufferException { 8443 return PARSER.parseFrom(data, extensionRegistry); 8444 } parseFrom(java.io.InputStream input)8445 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom(java.io.InputStream input) 8446 throws java.io.IOException { 8447 return PARSER.parseFrom(input); 8448 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8449 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 8450 java.io.InputStream input, 8451 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8452 throws java.io.IOException { 8453 return PARSER.parseFrom(input, extensionRegistry); 8454 } parseDelimitedFrom(java.io.InputStream input)8455 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom(java.io.InputStream input) 8456 throws java.io.IOException { 8457 return PARSER.parseDelimitedFrom(input); 8458 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8459 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseDelimitedFrom( 8460 java.io.InputStream input, 8461 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8462 throws java.io.IOException { 8463 return PARSER.parseDelimitedFrom(input, extensionRegistry); 8464 } parseFrom( com.google.protobuf.CodedInputStream input)8465 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 8466 com.google.protobuf.CodedInputStream input) 8467 throws java.io.IOException { 8468 return PARSER.parseFrom(input); 8469 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8470 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parseFrom( 8471 com.google.protobuf.CodedInputStream input, 8472 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8473 throws java.io.IOException { 8474 return PARSER.parseFrom(input, extensionRegistry); 8475 } 8476 newBuilder()8477 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()8478 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor prototype)8479 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor prototype) { 8480 return newBuilder().mergeFrom(prototype); 8481 } toBuilder()8482 public Builder toBuilder() { return newBuilder(this); } 8483 8484 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8485 protected Builder newBuilderForType( 8486 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8487 Builder builder = new Builder(parent); 8488 return builder; 8489 } 8490 /** 8491 * Protobuf type {@code Coprocessor} 8492 */ 8493 public static final class Builder extends 8494 com.google.protobuf.GeneratedMessage.Builder<Builder> 8495 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder { 8496 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8497 getDescriptor() { 8498 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; 8499 } 8500 8501 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8502 internalGetFieldAccessorTable() { 8503 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_fieldAccessorTable 8504 .ensureFieldAccessorsInitialized( 8505 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder.class); 8506 } 8507 8508 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder() Builder()8509 private Builder() { 8510 maybeForceBuilderInitialization(); 8511 } 8512 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8513 private Builder( 8514 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8515 super(parent); 8516 maybeForceBuilderInitialization(); 8517 } maybeForceBuilderInitialization()8518 private void maybeForceBuilderInitialization() { 8519 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 8520 } 8521 } create()8522 private static Builder create() { 8523 return new Builder(); 8524 } 8525 clear()8526 public Builder clear() { 8527 super.clear(); 8528 name_ = ""; 8529 bitField0_ = (bitField0_ & ~0x00000001); 8530 return this; 8531 } 8532 clone()8533 public Builder clone() { 8534 return create().mergeFrom(buildPartial()); 8535 } 8536 8537 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()8538 getDescriptorForType() { 8539 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_Coprocessor_descriptor; 8540 } 8541 getDefaultInstanceForType()8542 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getDefaultInstanceForType() { 8543 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance(); 8544 } 8545 build()8546 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor build() { 8547 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = buildPartial(); 8548 if (!result.isInitialized()) { 8549 throw newUninitializedMessageException(result); 8550 } 8551 return result; 8552 } 8553 buildPartial()8554 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor buildPartial() { 8555 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor(this); 8556 int from_bitField0_ = bitField0_; 8557 int to_bitField0_ = 0; 8558 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 8559 to_bitField0_ |= 0x00000001; 8560 } 8561 result.name_ = name_; 8562 result.bitField0_ = to_bitField0_; 8563 onBuilt(); 8564 return result; 8565 } 8566 mergeFrom(com.google.protobuf.Message other)8567 public Builder mergeFrom(com.google.protobuf.Message other) { 8568 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) { 8569 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor)other); 8570 } else { 8571 super.mergeFrom(other); 8572 return this; 8573 } 8574 } 8575 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other)8576 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor other) { 8577 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance()) return this; 8578 if (other.hasName()) { 8579 bitField0_ |= 0x00000001; 8580 name_ = other.name_; 8581 onChanged(); 8582 } 8583 this.mergeUnknownFields(other.getUnknownFields()); 8584 return this; 8585 } 8586 isInitialized()8587 public final boolean isInitialized() { 8588 if (!hasName()) { 8589 8590 return false; 8591 } 8592 return true; 8593 } 8594 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8595 public Builder mergeFrom( 8596 com.google.protobuf.CodedInputStream input, 8597 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8598 throws java.io.IOException { 8599 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor parsedMessage = null; 8600 try { 8601 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 8602 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8603 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor) e.getUnfinishedMessage(); 8604 throw e; 8605 } finally { 8606 if (parsedMessage != null) { 8607 mergeFrom(parsedMessage); 8608 } 8609 } 8610 return this; 8611 } 8612 private int bitField0_; 8613 8614 // required string name = 1; 8615 private java.lang.Object name_ = ""; 8616 /** 8617 * <code>required string name = 1;</code> 8618 */ hasName()8619 public boolean hasName() { 8620 return ((bitField0_ & 0x00000001) == 0x00000001); 8621 } 8622 /** 8623 * <code>required string name = 1;</code> 8624 */ getName()8625 public java.lang.String getName() { 8626 java.lang.Object ref = name_; 8627 if (!(ref instanceof java.lang.String)) { 8628 java.lang.String s = ((com.google.protobuf.ByteString) ref) 8629 .toStringUtf8(); 8630 name_ = s; 8631 return s; 8632 } else { 8633 return (java.lang.String) ref; 8634 } 8635 } 8636 /** 8637 * <code>required string name = 1;</code> 8638 */ 8639 public com.google.protobuf.ByteString getNameBytes()8640 getNameBytes() { 8641 java.lang.Object ref = name_; 8642 if (ref instanceof String) { 8643 com.google.protobuf.ByteString b = 8644 com.google.protobuf.ByteString.copyFromUtf8( 8645 (java.lang.String) ref); 8646 name_ = b; 8647 return b; 8648 } else { 8649 return (com.google.protobuf.ByteString) ref; 8650 } 8651 } 8652 /** 8653 * <code>required string name = 1;</code> 8654 */ setName( java.lang.String value)8655 public Builder setName( 8656 java.lang.String value) { 8657 if (value == null) { 8658 throw new NullPointerException(); 8659 } 8660 bitField0_ |= 0x00000001; 8661 name_ = value; 8662 onChanged(); 8663 return this; 8664 } 8665 /** 8666 * <code>required string name = 1;</code> 8667 */ clearName()8668 public Builder clearName() { 8669 bitField0_ = (bitField0_ & ~0x00000001); 8670 name_ = getDefaultInstance().getName(); 8671 onChanged(); 8672 return this; 8673 } 8674 /** 8675 * <code>required string name = 1;</code> 8676 */ setNameBytes( com.google.protobuf.ByteString value)8677 public Builder setNameBytes( 8678 com.google.protobuf.ByteString value) { 8679 if (value == null) { 8680 throw new NullPointerException(); 8681 } 8682 bitField0_ |= 0x00000001; 8683 name_ = value; 8684 onChanged(); 8685 return this; 8686 } 8687 8688 // @@protoc_insertion_point(builder_scope:Coprocessor) 8689 } 8690 8691 static { 8692 defaultInstance = new Coprocessor(true); defaultInstance.initFields()8693 defaultInstance.initFields(); 8694 } 8695 8696 // @@protoc_insertion_point(class_scope:Coprocessor) 8697 } 8698 8699 public interface NameStringPairOrBuilder 8700 extends com.google.protobuf.MessageOrBuilder { 8701 8702 // required string name = 1; 8703 /** 8704 * <code>required string name = 1;</code> 8705 */ hasName()8706 boolean hasName(); 8707 /** 8708 * <code>required string name = 1;</code> 8709 */ getName()8710 java.lang.String getName(); 8711 /** 8712 * <code>required string name = 1;</code> 8713 */ 8714 com.google.protobuf.ByteString getNameBytes()8715 getNameBytes(); 8716 8717 // required string value = 2; 8718 /** 8719 * <code>required string value = 2;</code> 8720 */ hasValue()8721 boolean hasValue(); 8722 /** 8723 * <code>required string value = 2;</code> 8724 */ getValue()8725 java.lang.String getValue(); 8726 /** 8727 * <code>required string value = 2;</code> 8728 */ 8729 com.google.protobuf.ByteString getValueBytes()8730 getValueBytes(); 8731 } 8732 /** 8733 * Protobuf type {@code NameStringPair} 8734 */ 8735 public static final class NameStringPair extends 8736 com.google.protobuf.GeneratedMessage 8737 implements NameStringPairOrBuilder { 8738 // Use NameStringPair.newBuilder() to construct. NameStringPair(com.google.protobuf.GeneratedMessage.Builder<?> builder)8739 private NameStringPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8740 super(builder); 8741 this.unknownFields = builder.getUnknownFields(); 8742 } NameStringPair(boolean noInit)8743 private NameStringPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8744 8745 private static final NameStringPair defaultInstance; getDefaultInstance()8746 public static NameStringPair getDefaultInstance() { 8747 return defaultInstance; 8748 } 8749 getDefaultInstanceForType()8750 public NameStringPair getDefaultInstanceForType() { 8751 return defaultInstance; 8752 } 8753 8754 private final com.google.protobuf.UnknownFieldSet unknownFields; 8755 @java.lang.Override 8756 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8757 getUnknownFields() { 8758 return this.unknownFields; 8759 } NameStringPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8760 private NameStringPair( 8761 com.google.protobuf.CodedInputStream input, 8762 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8763 throws com.google.protobuf.InvalidProtocolBufferException { 8764 initFields(); 8765 int mutable_bitField0_ = 0; 8766 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8767 com.google.protobuf.UnknownFieldSet.newBuilder(); 8768 try { 8769 boolean done = false; 8770 while (!done) { 8771 int tag = input.readTag(); 8772 switch (tag) { 8773 case 0: 8774 done = true; 8775 break; 8776 default: { 8777 if (!parseUnknownField(input, unknownFields, 8778 extensionRegistry, tag)) { 8779 done = true; 8780 } 8781 break; 8782 } 8783 case 10: { 8784 bitField0_ |= 0x00000001; 8785 name_ = input.readBytes(); 8786 break; 8787 } 8788 case 18: { 8789 bitField0_ |= 0x00000002; 8790 value_ = input.readBytes(); 8791 break; 8792 } 8793 } 8794 } 8795 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8796 throw e.setUnfinishedMessage(this); 8797 } catch (java.io.IOException e) { 8798 throw new com.google.protobuf.InvalidProtocolBufferException( 8799 e.getMessage()).setUnfinishedMessage(this); 8800 } finally { 8801 this.unknownFields = unknownFields.build(); 8802 makeExtensionsImmutable(); 8803 } 8804 } 8805 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8806 getDescriptor() { 8807 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; 8808 } 8809 8810 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8811 internalGetFieldAccessorTable() { 8812 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable 8813 .ensureFieldAccessorsInitialized( 8814 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); 8815 } 8816 8817 public static com.google.protobuf.Parser<NameStringPair> PARSER = 8818 new com.google.protobuf.AbstractParser<NameStringPair>() { 8819 public NameStringPair parsePartialFrom( 8820 com.google.protobuf.CodedInputStream input, 8821 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8822 throws com.google.protobuf.InvalidProtocolBufferException { 8823 return new NameStringPair(input, extensionRegistry); 8824 } 8825 }; 8826 8827 @java.lang.Override getParserForType()8828 public com.google.protobuf.Parser<NameStringPair> getParserForType() { 8829 return PARSER; 8830 } 8831 8832 private int bitField0_; 8833 // required string name = 1; 8834 public static final int NAME_FIELD_NUMBER = 1; 8835 private java.lang.Object name_; 8836 /** 8837 * <code>required string name = 1;</code> 8838 */ hasName()8839 public boolean hasName() { 8840 return ((bitField0_ & 0x00000001) == 0x00000001); 8841 } 8842 /** 8843 * <code>required string name = 1;</code> 8844 */ getName()8845 public java.lang.String getName() { 8846 java.lang.Object ref = name_; 8847 if (ref instanceof java.lang.String) { 8848 return (java.lang.String) ref; 8849 } else { 8850 com.google.protobuf.ByteString bs = 8851 (com.google.protobuf.ByteString) ref; 8852 java.lang.String s = bs.toStringUtf8(); 8853 if (bs.isValidUtf8()) { 8854 name_ = s; 8855 } 8856 return s; 8857 } 8858 } 8859 /** 8860 * <code>required string name = 1;</code> 8861 */ 8862 public com.google.protobuf.ByteString getNameBytes()8863 getNameBytes() { 8864 java.lang.Object ref = name_; 8865 if (ref instanceof java.lang.String) { 8866 com.google.protobuf.ByteString b = 8867 com.google.protobuf.ByteString.copyFromUtf8( 8868 (java.lang.String) ref); 8869 name_ = b; 8870 return b; 8871 } else { 8872 return (com.google.protobuf.ByteString) ref; 8873 } 8874 } 8875 8876 // required string value = 2; 8877 public static final int VALUE_FIELD_NUMBER = 2; 8878 private java.lang.Object value_; 8879 /** 8880 * <code>required string value = 2;</code> 8881 */ hasValue()8882 public boolean hasValue() { 8883 return ((bitField0_ & 0x00000002) == 0x00000002); 8884 } 8885 /** 8886 * <code>required string value = 2;</code> 8887 */ getValue()8888 public java.lang.String getValue() { 8889 java.lang.Object ref = value_; 8890 if (ref instanceof java.lang.String) { 8891 return (java.lang.String) ref; 8892 } else { 8893 com.google.protobuf.ByteString bs = 8894 (com.google.protobuf.ByteString) ref; 8895 java.lang.String s = bs.toStringUtf8(); 8896 if (bs.isValidUtf8()) { 8897 value_ = s; 8898 } 8899 return s; 8900 } 8901 } 8902 /** 8903 * <code>required string value = 2;</code> 8904 */ 8905 public com.google.protobuf.ByteString getValueBytes()8906 getValueBytes() { 8907 java.lang.Object ref = value_; 8908 if (ref instanceof java.lang.String) { 8909 com.google.protobuf.ByteString b = 8910 com.google.protobuf.ByteString.copyFromUtf8( 8911 (java.lang.String) ref); 8912 value_ = b; 8913 return b; 8914 } else { 8915 return (com.google.protobuf.ByteString) ref; 8916 } 8917 } 8918 initFields()8919 private void initFields() { 8920 name_ = ""; 8921 value_ = ""; 8922 } 8923 private byte memoizedIsInitialized = -1; isInitialized()8924 public final boolean isInitialized() { 8925 byte isInitialized = memoizedIsInitialized; 8926 if (isInitialized != -1) return isInitialized == 1; 8927 8928 if (!hasName()) { 8929 memoizedIsInitialized = 0; 8930 return false; 8931 } 8932 if (!hasValue()) { 8933 memoizedIsInitialized = 0; 8934 return false; 8935 } 8936 memoizedIsInitialized = 1; 8937 return true; 8938 } 8939 writeTo(com.google.protobuf.CodedOutputStream output)8940 public void writeTo(com.google.protobuf.CodedOutputStream output) 8941 throws java.io.IOException { 8942 getSerializedSize(); 8943 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8944 output.writeBytes(1, getNameBytes()); 8945 } 8946 if (((bitField0_ & 0x00000002) == 0x00000002)) { 8947 output.writeBytes(2, getValueBytes()); 8948 } 8949 getUnknownFields().writeTo(output); 8950 } 8951 8952 private int memoizedSerializedSize = -1; getSerializedSize()8953 public int getSerializedSize() { 8954 int size = memoizedSerializedSize; 8955 if (size != -1) return size; 8956 8957 size = 0; 8958 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8959 size += com.google.protobuf.CodedOutputStream 8960 .computeBytesSize(1, getNameBytes()); 8961 } 8962 if (((bitField0_ & 0x00000002) == 0x00000002)) { 8963 size += com.google.protobuf.CodedOutputStream 8964 .computeBytesSize(2, getValueBytes()); 8965 } 8966 size += getUnknownFields().getSerializedSize(); 8967 memoizedSerializedSize = size; 8968 return size; 8969 } 8970 8971 private static final long serialVersionUID = 0L; 8972 @java.lang.Override writeReplace()8973 protected java.lang.Object writeReplace() 8974 throws java.io.ObjectStreamException { 8975 return super.writeReplace(); 8976 } 8977 8978 @java.lang.Override equals(final java.lang.Object obj)8979 public boolean equals(final java.lang.Object obj) { 8980 if (obj == this) { 8981 return true; 8982 } 8983 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)) { 8984 return super.equals(obj); 8985 } 8986 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) obj; 8987 8988 boolean result = true; 8989 result = result && (hasName() == other.hasName()); 8990 if (hasName()) { 8991 result = result && getName() 8992 .equals(other.getName()); 8993 } 8994 result = result && (hasValue() == other.hasValue()); 8995 if (hasValue()) { 8996 result = result && getValue() 8997 .equals(other.getValue()); 8998 } 8999 result = result && 9000 getUnknownFields().equals(other.getUnknownFields()); 9001 return result; 9002 } 9003 9004 private int memoizedHashCode = 0; 9005 @java.lang.Override hashCode()9006 public int hashCode() { 9007 if (memoizedHashCode != 0) { 9008 return memoizedHashCode; 9009 } 9010 int hash = 41; 9011 hash = (19 * hash) + getDescriptorForType().hashCode(); 9012 if (hasName()) { 9013 hash = (37 * hash) + NAME_FIELD_NUMBER; 9014 hash = (53 * hash) + getName().hashCode(); 9015 } 9016 if (hasValue()) { 9017 hash = (37 * hash) + VALUE_FIELD_NUMBER; 9018 hash = (53 * hash) + getValue().hashCode(); 9019 } 9020 hash = (29 * hash) + getUnknownFields().hashCode(); 9021 memoizedHashCode = hash; 9022 return hash; 9023 } 9024 parseFrom( com.google.protobuf.ByteString data)9025 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( 9026 com.google.protobuf.ByteString data) 9027 throws com.google.protobuf.InvalidProtocolBufferException { 9028 return PARSER.parseFrom(data); 9029 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9030 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( 9031 com.google.protobuf.ByteString data, 9032 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9033 throws com.google.protobuf.InvalidProtocolBufferException { 9034 return PARSER.parseFrom(data, extensionRegistry); 9035 } parseFrom(byte[] data)9036 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(byte[] data) 9037 throws com.google.protobuf.InvalidProtocolBufferException { 9038 return PARSER.parseFrom(data); 9039 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9040 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( 9041 byte[] data, 9042 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9043 throws com.google.protobuf.InvalidProtocolBufferException { 9044 return PARSER.parseFrom(data, extensionRegistry); 9045 } parseFrom(java.io.InputStream input)9046 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom(java.io.InputStream input) 9047 throws java.io.IOException { 9048 return PARSER.parseFrom(input); 9049 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9050 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( 9051 java.io.InputStream input, 9052 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9053 throws java.io.IOException { 9054 return PARSER.parseFrom(input, extensionRegistry); 9055 } parseDelimitedFrom(java.io.InputStream input)9056 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom(java.io.InputStream input) 9057 throws java.io.IOException { 9058 return PARSER.parseDelimitedFrom(input); 9059 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9060 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseDelimitedFrom( 9061 java.io.InputStream input, 9062 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9063 throws java.io.IOException { 9064 return PARSER.parseDelimitedFrom(input, extensionRegistry); 9065 } parseFrom( com.google.protobuf.CodedInputStream input)9066 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( 9067 com.google.protobuf.CodedInputStream input) 9068 throws java.io.IOException { 9069 return PARSER.parseFrom(input); 9070 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9071 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parseFrom( 9072 com.google.protobuf.CodedInputStream input, 9073 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9074 throws java.io.IOException { 9075 return PARSER.parseFrom(input, extensionRegistry); 9076 } 9077 newBuilder()9078 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()9079 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype)9080 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair prototype) { 9081 return newBuilder().mergeFrom(prototype); 9082 } toBuilder()9083 public Builder toBuilder() { return newBuilder(this); } 9084 9085 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9086 protected Builder newBuilderForType( 9087 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9088 Builder builder = new Builder(parent); 9089 return builder; 9090 } 9091 /** 9092 * Protobuf type {@code NameStringPair} 9093 */ 9094 public static final class Builder extends 9095 com.google.protobuf.GeneratedMessage.Builder<Builder> 9096 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder { 9097 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9098 getDescriptor() { 9099 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; 9100 } 9101 9102 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9103 internalGetFieldAccessorTable() { 9104 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_fieldAccessorTable 9105 .ensureFieldAccessorsInitialized( 9106 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder.class); 9107 } 9108 9109 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder() Builder()9110 private Builder() { 9111 maybeForceBuilderInitialization(); 9112 } 9113 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)9114 private Builder( 9115 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9116 super(parent); 9117 maybeForceBuilderInitialization(); 9118 } maybeForceBuilderInitialization()9119 private void maybeForceBuilderInitialization() { 9120 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 9121 } 9122 } create()9123 private static Builder create() { 9124 return new Builder(); 9125 } 9126 clear()9127 public Builder clear() { 9128 super.clear(); 9129 name_ = ""; 9130 bitField0_ = (bitField0_ & ~0x00000001); 9131 value_ = ""; 9132 bitField0_ = (bitField0_ & ~0x00000002); 9133 return this; 9134 } 9135 clone()9136 public Builder clone() { 9137 return create().mergeFrom(buildPartial()); 9138 } 9139 9140 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()9141 getDescriptorForType() { 9142 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameStringPair_descriptor; 9143 } 9144 getDefaultInstanceForType()9145 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getDefaultInstanceForType() { 9146 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance(); 9147 } 9148 build()9149 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair build() { 9150 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = buildPartial(); 9151 if (!result.isInitialized()) { 9152 throw newUninitializedMessageException(result); 9153 } 9154 return result; 9155 } 9156 buildPartial()9157 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair buildPartial() { 9158 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair(this); 9159 int from_bitField0_ = bitField0_; 9160 int to_bitField0_ = 0; 9161 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 9162 to_bitField0_ |= 0x00000001; 9163 } 9164 result.name_ = name_; 9165 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 9166 to_bitField0_ |= 0x00000002; 9167 } 9168 result.value_ = value_; 9169 result.bitField0_ = to_bitField0_; 9170 onBuilt(); 9171 return result; 9172 } 9173 mergeFrom(com.google.protobuf.Message other)9174 public Builder mergeFrom(com.google.protobuf.Message other) { 9175 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) { 9176 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair)other); 9177 } else { 9178 super.mergeFrom(other); 9179 return this; 9180 } 9181 } 9182 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other)9183 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair other) { 9184 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance()) return this; 9185 if (other.hasName()) { 9186 bitField0_ |= 0x00000001; 9187 name_ = other.name_; 9188 onChanged(); 9189 } 9190 if (other.hasValue()) { 9191 bitField0_ |= 0x00000002; 9192 value_ = other.value_; 9193 onChanged(); 9194 } 9195 this.mergeUnknownFields(other.getUnknownFields()); 9196 return this; 9197 } 9198 isInitialized()9199 public final boolean isInitialized() { 9200 if (!hasName()) { 9201 9202 return false; 9203 } 9204 if (!hasValue()) { 9205 9206 return false; 9207 } 9208 return true; 9209 } 9210 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9211 public Builder mergeFrom( 9212 com.google.protobuf.CodedInputStream input, 9213 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9214 throws java.io.IOException { 9215 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair parsedMessage = null; 9216 try { 9217 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 9218 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9219 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair) e.getUnfinishedMessage(); 9220 throw e; 9221 } finally { 9222 if (parsedMessage != null) { 9223 mergeFrom(parsedMessage); 9224 } 9225 } 9226 return this; 9227 } 9228 private int bitField0_; 9229 9230 // required string name = 1; 9231 private java.lang.Object name_ = ""; 9232 /** 9233 * <code>required string name = 1;</code> 9234 */ hasName()9235 public boolean hasName() { 9236 return ((bitField0_ & 0x00000001) == 0x00000001); 9237 } 9238 /** 9239 * <code>required string name = 1;</code> 9240 */ getName()9241 public java.lang.String getName() { 9242 java.lang.Object ref = name_; 9243 if (!(ref instanceof java.lang.String)) { 9244 java.lang.String s = ((com.google.protobuf.ByteString) ref) 9245 .toStringUtf8(); 9246 name_ = s; 9247 return s; 9248 } else { 9249 return (java.lang.String) ref; 9250 } 9251 } 9252 /** 9253 * <code>required string name = 1;</code> 9254 */ 9255 public com.google.protobuf.ByteString getNameBytes()9256 getNameBytes() { 9257 java.lang.Object ref = name_; 9258 if (ref instanceof String) { 9259 com.google.protobuf.ByteString b = 9260 com.google.protobuf.ByteString.copyFromUtf8( 9261 (java.lang.String) ref); 9262 name_ = b; 9263 return b; 9264 } else { 9265 return (com.google.protobuf.ByteString) ref; 9266 } 9267 } 9268 /** 9269 * <code>required string name = 1;</code> 9270 */ setName( java.lang.String value)9271 public Builder setName( 9272 java.lang.String value) { 9273 if (value == null) { 9274 throw new NullPointerException(); 9275 } 9276 bitField0_ |= 0x00000001; 9277 name_ = value; 9278 onChanged(); 9279 return this; 9280 } 9281 /** 9282 * <code>required string name = 1;</code> 9283 */ clearName()9284 public Builder clearName() { 9285 bitField0_ = (bitField0_ & ~0x00000001); 9286 name_ = getDefaultInstance().getName(); 9287 onChanged(); 9288 return this; 9289 } 9290 /** 9291 * <code>required string name = 1;</code> 9292 */ setNameBytes( com.google.protobuf.ByteString value)9293 public Builder setNameBytes( 9294 com.google.protobuf.ByteString value) { 9295 if (value == null) { 9296 throw new NullPointerException(); 9297 } 9298 bitField0_ |= 0x00000001; 9299 name_ = value; 9300 onChanged(); 9301 return this; 9302 } 9303 9304 // required string value = 2; 9305 private java.lang.Object value_ = ""; 9306 /** 9307 * <code>required string value = 2;</code> 9308 */ hasValue()9309 public boolean hasValue() { 9310 return ((bitField0_ & 0x00000002) == 0x00000002); 9311 } 9312 /** 9313 * <code>required string value = 2;</code> 9314 */ getValue()9315 public java.lang.String getValue() { 9316 java.lang.Object ref = value_; 9317 if (!(ref instanceof java.lang.String)) { 9318 java.lang.String s = ((com.google.protobuf.ByteString) ref) 9319 .toStringUtf8(); 9320 value_ = s; 9321 return s; 9322 } else { 9323 return (java.lang.String) ref; 9324 } 9325 } 9326 /** 9327 * <code>required string value = 2;</code> 9328 */ 9329 public com.google.protobuf.ByteString getValueBytes()9330 getValueBytes() { 9331 java.lang.Object ref = value_; 9332 if (ref instanceof String) { 9333 com.google.protobuf.ByteString b = 9334 com.google.protobuf.ByteString.copyFromUtf8( 9335 (java.lang.String) ref); 9336 value_ = b; 9337 return b; 9338 } else { 9339 return (com.google.protobuf.ByteString) ref; 9340 } 9341 } 9342 /** 9343 * <code>required string value = 2;</code> 9344 */ setValue( java.lang.String value)9345 public Builder setValue( 9346 java.lang.String value) { 9347 if (value == null) { 9348 throw new NullPointerException(); 9349 } 9350 bitField0_ |= 0x00000002; 9351 value_ = value; 9352 onChanged(); 9353 return this; 9354 } 9355 /** 9356 * <code>required string value = 2;</code> 9357 */ clearValue()9358 public Builder clearValue() { 9359 bitField0_ = (bitField0_ & ~0x00000002); 9360 value_ = getDefaultInstance().getValue(); 9361 onChanged(); 9362 return this; 9363 } 9364 /** 9365 * <code>required string value = 2;</code> 9366 */ setValueBytes( com.google.protobuf.ByteString value)9367 public Builder setValueBytes( 9368 com.google.protobuf.ByteString value) { 9369 if (value == null) { 9370 throw new NullPointerException(); 9371 } 9372 bitField0_ |= 0x00000002; 9373 value_ = value; 9374 onChanged(); 9375 return this; 9376 } 9377 9378 // @@protoc_insertion_point(builder_scope:NameStringPair) 9379 } 9380 9381 static { 9382 defaultInstance = new NameStringPair(true); defaultInstance.initFields()9383 defaultInstance.initFields(); 9384 } 9385 9386 // @@protoc_insertion_point(class_scope:NameStringPair) 9387 } 9388 9389 public interface NameBytesPairOrBuilder 9390 extends com.google.protobuf.MessageOrBuilder { 9391 9392 // required string name = 1; 9393 /** 9394 * <code>required string name = 1;</code> 9395 */ hasName()9396 boolean hasName(); 9397 /** 9398 * <code>required string name = 1;</code> 9399 */ getName()9400 java.lang.String getName(); 9401 /** 9402 * <code>required string name = 1;</code> 9403 */ 9404 com.google.protobuf.ByteString getNameBytes()9405 getNameBytes(); 9406 9407 // optional bytes value = 2; 9408 /** 9409 * <code>optional bytes value = 2;</code> 9410 */ hasValue()9411 boolean hasValue(); 9412 /** 9413 * <code>optional bytes value = 2;</code> 9414 */ getValue()9415 com.google.protobuf.ByteString getValue(); 9416 } 9417 /** 9418 * Protobuf type {@code NameBytesPair} 9419 */ 9420 public static final class NameBytesPair extends 9421 com.google.protobuf.GeneratedMessage 9422 implements NameBytesPairOrBuilder { 9423 // Use NameBytesPair.newBuilder() to construct. NameBytesPair(com.google.protobuf.GeneratedMessage.Builder<?> builder)9424 private NameBytesPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 9425 super(builder); 9426 this.unknownFields = builder.getUnknownFields(); 9427 } NameBytesPair(boolean noInit)9428 private NameBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 9429 9430 private static final NameBytesPair defaultInstance; getDefaultInstance()9431 public static NameBytesPair getDefaultInstance() { 9432 return defaultInstance; 9433 } 9434 getDefaultInstanceForType()9435 public NameBytesPair getDefaultInstanceForType() { 9436 return defaultInstance; 9437 } 9438 9439 private final com.google.protobuf.UnknownFieldSet unknownFields; 9440 @java.lang.Override 9441 public final com.google.protobuf.UnknownFieldSet getUnknownFields()9442 getUnknownFields() { 9443 return this.unknownFields; 9444 } NameBytesPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9445 private NameBytesPair( 9446 com.google.protobuf.CodedInputStream input, 9447 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9448 throws com.google.protobuf.InvalidProtocolBufferException { 9449 initFields(); 9450 int mutable_bitField0_ = 0; 9451 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 9452 com.google.protobuf.UnknownFieldSet.newBuilder(); 9453 try { 9454 boolean done = false; 9455 while (!done) { 9456 int tag = input.readTag(); 9457 switch (tag) { 9458 case 0: 9459 done = true; 9460 break; 9461 default: { 9462 if (!parseUnknownField(input, unknownFields, 9463 extensionRegistry, tag)) { 9464 done = true; 9465 } 9466 break; 9467 } 9468 case 10: { 9469 bitField0_ |= 0x00000001; 9470 name_ = input.readBytes(); 9471 break; 9472 } 9473 case 18: { 9474 bitField0_ |= 0x00000002; 9475 value_ = input.readBytes(); 9476 break; 9477 } 9478 } 9479 } 9480 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9481 throw e.setUnfinishedMessage(this); 9482 } catch (java.io.IOException e) { 9483 throw new com.google.protobuf.InvalidProtocolBufferException( 9484 e.getMessage()).setUnfinishedMessage(this); 9485 } finally { 9486 this.unknownFields = unknownFields.build(); 9487 makeExtensionsImmutable(); 9488 } 9489 } 9490 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9491 getDescriptor() { 9492 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; 9493 } 9494 9495 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9496 internalGetFieldAccessorTable() { 9497 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable 9498 .ensureFieldAccessorsInitialized( 9499 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); 9500 } 9501 9502 public static com.google.protobuf.Parser<NameBytesPair> PARSER = 9503 new com.google.protobuf.AbstractParser<NameBytesPair>() { 9504 public NameBytesPair parsePartialFrom( 9505 com.google.protobuf.CodedInputStream input, 9506 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9507 throws com.google.protobuf.InvalidProtocolBufferException { 9508 return new NameBytesPair(input, extensionRegistry); 9509 } 9510 }; 9511 9512 @java.lang.Override getParserForType()9513 public com.google.protobuf.Parser<NameBytesPair> getParserForType() { 9514 return PARSER; 9515 } 9516 9517 private int bitField0_; 9518 // required string name = 1; 9519 public static final int NAME_FIELD_NUMBER = 1; 9520 private java.lang.Object name_; 9521 /** 9522 * <code>required string name = 1;</code> 9523 */ hasName()9524 public boolean hasName() { 9525 return ((bitField0_ & 0x00000001) == 0x00000001); 9526 } 9527 /** 9528 * <code>required string name = 1;</code> 9529 */ getName()9530 public java.lang.String getName() { 9531 java.lang.Object ref = name_; 9532 if (ref instanceof java.lang.String) { 9533 return (java.lang.String) ref; 9534 } else { 9535 com.google.protobuf.ByteString bs = 9536 (com.google.protobuf.ByteString) ref; 9537 java.lang.String s = bs.toStringUtf8(); 9538 if (bs.isValidUtf8()) { 9539 name_ = s; 9540 } 9541 return s; 9542 } 9543 } 9544 /** 9545 * <code>required string name = 1;</code> 9546 */ 9547 public com.google.protobuf.ByteString getNameBytes()9548 getNameBytes() { 9549 java.lang.Object ref = name_; 9550 if (ref instanceof java.lang.String) { 9551 com.google.protobuf.ByteString b = 9552 com.google.protobuf.ByteString.copyFromUtf8( 9553 (java.lang.String) ref); 9554 name_ = b; 9555 return b; 9556 } else { 9557 return (com.google.protobuf.ByteString) ref; 9558 } 9559 } 9560 9561 // optional bytes value = 2; 9562 public static final int VALUE_FIELD_NUMBER = 2; 9563 private com.google.protobuf.ByteString value_; 9564 /** 9565 * <code>optional bytes value = 2;</code> 9566 */ hasValue()9567 public boolean hasValue() { 9568 return ((bitField0_ & 0x00000002) == 0x00000002); 9569 } 9570 /** 9571 * <code>optional bytes value = 2;</code> 9572 */ getValue()9573 public com.google.protobuf.ByteString getValue() { 9574 return value_; 9575 } 9576 initFields()9577 private void initFields() { 9578 name_ = ""; 9579 value_ = com.google.protobuf.ByteString.EMPTY; 9580 } 9581 private byte memoizedIsInitialized = -1; isInitialized()9582 public final boolean isInitialized() { 9583 byte isInitialized = memoizedIsInitialized; 9584 if (isInitialized != -1) return isInitialized == 1; 9585 9586 if (!hasName()) { 9587 memoizedIsInitialized = 0; 9588 return false; 9589 } 9590 memoizedIsInitialized = 1; 9591 return true; 9592 } 9593 writeTo(com.google.protobuf.CodedOutputStream output)9594 public void writeTo(com.google.protobuf.CodedOutputStream output) 9595 throws java.io.IOException { 9596 getSerializedSize(); 9597 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9598 output.writeBytes(1, getNameBytes()); 9599 } 9600 if (((bitField0_ & 0x00000002) == 0x00000002)) { 9601 output.writeBytes(2, value_); 9602 } 9603 getUnknownFields().writeTo(output); 9604 } 9605 9606 private int memoizedSerializedSize = -1; getSerializedSize()9607 public int getSerializedSize() { 9608 int size = memoizedSerializedSize; 9609 if (size != -1) return size; 9610 9611 size = 0; 9612 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9613 size += com.google.protobuf.CodedOutputStream 9614 .computeBytesSize(1, getNameBytes()); 9615 } 9616 if (((bitField0_ & 0x00000002) == 0x00000002)) { 9617 size += com.google.protobuf.CodedOutputStream 9618 .computeBytesSize(2, value_); 9619 } 9620 size += getUnknownFields().getSerializedSize(); 9621 memoizedSerializedSize = size; 9622 return size; 9623 } 9624 9625 private static final long serialVersionUID = 0L; 9626 @java.lang.Override writeReplace()9627 protected java.lang.Object writeReplace() 9628 throws java.io.ObjectStreamException { 9629 return super.writeReplace(); 9630 } 9631 9632 @java.lang.Override equals(final java.lang.Object obj)9633 public boolean equals(final java.lang.Object obj) { 9634 if (obj == this) { 9635 return true; 9636 } 9637 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)) { 9638 return super.equals(obj); 9639 } 9640 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) obj; 9641 9642 boolean result = true; 9643 result = result && (hasName() == other.hasName()); 9644 if (hasName()) { 9645 result = result && getName() 9646 .equals(other.getName()); 9647 } 9648 result = result && (hasValue() == other.hasValue()); 9649 if (hasValue()) { 9650 result = result && getValue() 9651 .equals(other.getValue()); 9652 } 9653 result = result && 9654 getUnknownFields().equals(other.getUnknownFields()); 9655 return result; 9656 } 9657 9658 private int memoizedHashCode = 0; 9659 @java.lang.Override hashCode()9660 public int hashCode() { 9661 if (memoizedHashCode != 0) { 9662 return memoizedHashCode; 9663 } 9664 int hash = 41; 9665 hash = (19 * hash) + getDescriptorForType().hashCode(); 9666 if (hasName()) { 9667 hash = (37 * hash) + NAME_FIELD_NUMBER; 9668 hash = (53 * hash) + getName().hashCode(); 9669 } 9670 if (hasValue()) { 9671 hash = (37 * hash) + VALUE_FIELD_NUMBER; 9672 hash = (53 * hash) + getValue().hashCode(); 9673 } 9674 hash = (29 * hash) + getUnknownFields().hashCode(); 9675 memoizedHashCode = hash; 9676 return hash; 9677 } 9678 parseFrom( com.google.protobuf.ByteString data)9679 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( 9680 com.google.protobuf.ByteString data) 9681 throws com.google.protobuf.InvalidProtocolBufferException { 9682 return PARSER.parseFrom(data); 9683 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9684 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( 9685 com.google.protobuf.ByteString data, 9686 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9687 throws com.google.protobuf.InvalidProtocolBufferException { 9688 return PARSER.parseFrom(data, extensionRegistry); 9689 } parseFrom(byte[] data)9690 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(byte[] data) 9691 throws com.google.protobuf.InvalidProtocolBufferException { 9692 return PARSER.parseFrom(data); 9693 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9694 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( 9695 byte[] data, 9696 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9697 throws com.google.protobuf.InvalidProtocolBufferException { 9698 return PARSER.parseFrom(data, extensionRegistry); 9699 } parseFrom(java.io.InputStream input)9700 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom(java.io.InputStream input) 9701 throws java.io.IOException { 9702 return PARSER.parseFrom(input); 9703 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9704 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( 9705 java.io.InputStream input, 9706 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9707 throws java.io.IOException { 9708 return PARSER.parseFrom(input, extensionRegistry); 9709 } parseDelimitedFrom(java.io.InputStream input)9710 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom(java.io.InputStream input) 9711 throws java.io.IOException { 9712 return PARSER.parseDelimitedFrom(input); 9713 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9714 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseDelimitedFrom( 9715 java.io.InputStream input, 9716 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9717 throws java.io.IOException { 9718 return PARSER.parseDelimitedFrom(input, extensionRegistry); 9719 } parseFrom( com.google.protobuf.CodedInputStream input)9720 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( 9721 com.google.protobuf.CodedInputStream input) 9722 throws java.io.IOException { 9723 return PARSER.parseFrom(input); 9724 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9725 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parseFrom( 9726 com.google.protobuf.CodedInputStream input, 9727 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9728 throws java.io.IOException { 9729 return PARSER.parseFrom(input, extensionRegistry); 9730 } 9731 newBuilder()9732 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()9733 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype)9734 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair prototype) { 9735 return newBuilder().mergeFrom(prototype); 9736 } toBuilder()9737 public Builder toBuilder() { return newBuilder(this); } 9738 9739 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9740 protected Builder newBuilderForType( 9741 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9742 Builder builder = new Builder(parent); 9743 return builder; 9744 } 9745 /** 9746 * Protobuf type {@code NameBytesPair} 9747 */ 9748 public static final class Builder extends 9749 com.google.protobuf.GeneratedMessage.Builder<Builder> 9750 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder { 9751 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9752 getDescriptor() { 9753 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; 9754 } 9755 9756 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9757 internalGetFieldAccessorTable() { 9758 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_fieldAccessorTable 9759 .ensureFieldAccessorsInitialized( 9760 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder.class); 9761 } 9762 9763 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder() Builder()9764 private Builder() { 9765 maybeForceBuilderInitialization(); 9766 } 9767 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)9768 private Builder( 9769 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9770 super(parent); 9771 maybeForceBuilderInitialization(); 9772 } maybeForceBuilderInitialization()9773 private void maybeForceBuilderInitialization() { 9774 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 9775 } 9776 } create()9777 private static Builder create() { 9778 return new Builder(); 9779 } 9780 clear()9781 public Builder clear() { 9782 super.clear(); 9783 name_ = ""; 9784 bitField0_ = (bitField0_ & ~0x00000001); 9785 value_ = com.google.protobuf.ByteString.EMPTY; 9786 bitField0_ = (bitField0_ & ~0x00000002); 9787 return this; 9788 } 9789 clone()9790 public Builder clone() { 9791 return create().mergeFrom(buildPartial()); 9792 } 9793 9794 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()9795 getDescriptorForType() { 9796 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameBytesPair_descriptor; 9797 } 9798 getDefaultInstanceForType()9799 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getDefaultInstanceForType() { 9800 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance(); 9801 } 9802 build()9803 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair build() { 9804 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = buildPartial(); 9805 if (!result.isInitialized()) { 9806 throw newUninitializedMessageException(result); 9807 } 9808 return result; 9809 } 9810 buildPartial()9811 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair buildPartial() { 9812 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair(this); 9813 int from_bitField0_ = bitField0_; 9814 int to_bitField0_ = 0; 9815 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 9816 to_bitField0_ |= 0x00000001; 9817 } 9818 result.name_ = name_; 9819 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 9820 to_bitField0_ |= 0x00000002; 9821 } 9822 result.value_ = value_; 9823 result.bitField0_ = to_bitField0_; 9824 onBuilt(); 9825 return result; 9826 } 9827 mergeFrom(com.google.protobuf.Message other)9828 public Builder mergeFrom(com.google.protobuf.Message other) { 9829 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) { 9830 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair)other); 9831 } else { 9832 super.mergeFrom(other); 9833 return this; 9834 } 9835 } 9836 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other)9837 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair other) { 9838 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) return this; 9839 if (other.hasName()) { 9840 bitField0_ |= 0x00000001; 9841 name_ = other.name_; 9842 onChanged(); 9843 } 9844 if (other.hasValue()) { 9845 setValue(other.getValue()); 9846 } 9847 this.mergeUnknownFields(other.getUnknownFields()); 9848 return this; 9849 } 9850 isInitialized()9851 public final boolean isInitialized() { 9852 if (!hasName()) { 9853 9854 return false; 9855 } 9856 return true; 9857 } 9858 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9859 public Builder mergeFrom( 9860 com.google.protobuf.CodedInputStream input, 9861 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9862 throws java.io.IOException { 9863 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair parsedMessage = null; 9864 try { 9865 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 9866 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9867 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair) e.getUnfinishedMessage(); 9868 throw e; 9869 } finally { 9870 if (parsedMessage != null) { 9871 mergeFrom(parsedMessage); 9872 } 9873 } 9874 return this; 9875 } 9876 private int bitField0_; 9877 9878 // required string name = 1; 9879 private java.lang.Object name_ = ""; 9880 /** 9881 * <code>required string name = 1;</code> 9882 */ hasName()9883 public boolean hasName() { 9884 return ((bitField0_ & 0x00000001) == 0x00000001); 9885 } 9886 /** 9887 * <code>required string name = 1;</code> 9888 */ getName()9889 public java.lang.String getName() { 9890 java.lang.Object ref = name_; 9891 if (!(ref instanceof java.lang.String)) { 9892 java.lang.String s = ((com.google.protobuf.ByteString) ref) 9893 .toStringUtf8(); 9894 name_ = s; 9895 return s; 9896 } else { 9897 return (java.lang.String) ref; 9898 } 9899 } 9900 /** 9901 * <code>required string name = 1;</code> 9902 */ 9903 public com.google.protobuf.ByteString getNameBytes()9904 getNameBytes() { 9905 java.lang.Object ref = name_; 9906 if (ref instanceof String) { 9907 com.google.protobuf.ByteString b = 9908 com.google.protobuf.ByteString.copyFromUtf8( 9909 (java.lang.String) ref); 9910 name_ = b; 9911 return b; 9912 } else { 9913 return (com.google.protobuf.ByteString) ref; 9914 } 9915 } 9916 /** 9917 * <code>required string name = 1;</code> 9918 */ setName( java.lang.String value)9919 public Builder setName( 9920 java.lang.String value) { 9921 if (value == null) { 9922 throw new NullPointerException(); 9923 } 9924 bitField0_ |= 0x00000001; 9925 name_ = value; 9926 onChanged(); 9927 return this; 9928 } 9929 /** 9930 * <code>required string name = 1;</code> 9931 */ clearName()9932 public Builder clearName() { 9933 bitField0_ = (bitField0_ & ~0x00000001); 9934 name_ = getDefaultInstance().getName(); 9935 onChanged(); 9936 return this; 9937 } 9938 /** 9939 * <code>required string name = 1;</code> 9940 */ setNameBytes( com.google.protobuf.ByteString value)9941 public Builder setNameBytes( 9942 com.google.protobuf.ByteString value) { 9943 if (value == null) { 9944 throw new NullPointerException(); 9945 } 9946 bitField0_ |= 0x00000001; 9947 name_ = value; 9948 onChanged(); 9949 return this; 9950 } 9951 9952 // optional bytes value = 2; 9953 private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; 9954 /** 9955 * <code>optional bytes value = 2;</code> 9956 */ hasValue()9957 public boolean hasValue() { 9958 return ((bitField0_ & 0x00000002) == 0x00000002); 9959 } 9960 /** 9961 * <code>optional bytes value = 2;</code> 9962 */ getValue()9963 public com.google.protobuf.ByteString getValue() { 9964 return value_; 9965 } 9966 /** 9967 * <code>optional bytes value = 2;</code> 9968 */ setValue(com.google.protobuf.ByteString value)9969 public Builder setValue(com.google.protobuf.ByteString value) { 9970 if (value == null) { 9971 throw new NullPointerException(); 9972 } 9973 bitField0_ |= 0x00000002; 9974 value_ = value; 9975 onChanged(); 9976 return this; 9977 } 9978 /** 9979 * <code>optional bytes value = 2;</code> 9980 */ clearValue()9981 public Builder clearValue() { 9982 bitField0_ = (bitField0_ & ~0x00000002); 9983 value_ = getDefaultInstance().getValue(); 9984 onChanged(); 9985 return this; 9986 } 9987 9988 // @@protoc_insertion_point(builder_scope:NameBytesPair) 9989 } 9990 9991 static { 9992 defaultInstance = new NameBytesPair(true); defaultInstance.initFields()9993 defaultInstance.initFields(); 9994 } 9995 9996 // @@protoc_insertion_point(class_scope:NameBytesPair) 9997 } 9998 9999 public interface BytesBytesPairOrBuilder 10000 extends com.google.protobuf.MessageOrBuilder { 10001 10002 // required bytes first = 1; 10003 /** 10004 * <code>required bytes first = 1;</code> 10005 */ hasFirst()10006 boolean hasFirst(); 10007 /** 10008 * <code>required bytes first = 1;</code> 10009 */ getFirst()10010 com.google.protobuf.ByteString getFirst(); 10011 10012 // required bytes second = 2; 10013 /** 10014 * <code>required bytes second = 2;</code> 10015 */ hasSecond()10016 boolean hasSecond(); 10017 /** 10018 * <code>required bytes second = 2;</code> 10019 */ getSecond()10020 com.google.protobuf.ByteString getSecond(); 10021 } 10022 /** 10023 * Protobuf type {@code BytesBytesPair} 10024 */ 10025 public static final class BytesBytesPair extends 10026 com.google.protobuf.GeneratedMessage 10027 implements BytesBytesPairOrBuilder { 10028 // Use BytesBytesPair.newBuilder() to construct. BytesBytesPair(com.google.protobuf.GeneratedMessage.Builder<?> builder)10029 private BytesBytesPair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 10030 super(builder); 10031 this.unknownFields = builder.getUnknownFields(); 10032 } BytesBytesPair(boolean noInit)10033 private BytesBytesPair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 10034 10035 private static final BytesBytesPair defaultInstance; getDefaultInstance()10036 public static BytesBytesPair getDefaultInstance() { 10037 return defaultInstance; 10038 } 10039 getDefaultInstanceForType()10040 public BytesBytesPair getDefaultInstanceForType() { 10041 return defaultInstance; 10042 } 10043 10044 private final com.google.protobuf.UnknownFieldSet unknownFields; 10045 @java.lang.Override 10046 public final com.google.protobuf.UnknownFieldSet getUnknownFields()10047 getUnknownFields() { 10048 return this.unknownFields; 10049 } BytesBytesPair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10050 private BytesBytesPair( 10051 com.google.protobuf.CodedInputStream input, 10052 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10053 throws com.google.protobuf.InvalidProtocolBufferException { 10054 initFields(); 10055 int mutable_bitField0_ = 0; 10056 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 10057 com.google.protobuf.UnknownFieldSet.newBuilder(); 10058 try { 10059 boolean done = false; 10060 while (!done) { 10061 int tag = input.readTag(); 10062 switch (tag) { 10063 case 0: 10064 done = true; 10065 break; 10066 default: { 10067 if (!parseUnknownField(input, unknownFields, 10068 extensionRegistry, tag)) { 10069 done = true; 10070 } 10071 break; 10072 } 10073 case 10: { 10074 bitField0_ |= 0x00000001; 10075 first_ = input.readBytes(); 10076 break; 10077 } 10078 case 18: { 10079 bitField0_ |= 0x00000002; 10080 second_ = input.readBytes(); 10081 break; 10082 } 10083 } 10084 } 10085 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10086 throw e.setUnfinishedMessage(this); 10087 } catch (java.io.IOException e) { 10088 throw new com.google.protobuf.InvalidProtocolBufferException( 10089 e.getMessage()).setUnfinishedMessage(this); 10090 } finally { 10091 this.unknownFields = unknownFields.build(); 10092 makeExtensionsImmutable(); 10093 } 10094 } 10095 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10096 getDescriptor() { 10097 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; 10098 } 10099 10100 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10101 internalGetFieldAccessorTable() { 10102 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable 10103 .ensureFieldAccessorsInitialized( 10104 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); 10105 } 10106 10107 public static com.google.protobuf.Parser<BytesBytesPair> PARSER = 10108 new com.google.protobuf.AbstractParser<BytesBytesPair>() { 10109 public BytesBytesPair parsePartialFrom( 10110 com.google.protobuf.CodedInputStream input, 10111 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10112 throws com.google.protobuf.InvalidProtocolBufferException { 10113 return new BytesBytesPair(input, extensionRegistry); 10114 } 10115 }; 10116 10117 @java.lang.Override getParserForType()10118 public com.google.protobuf.Parser<BytesBytesPair> getParserForType() { 10119 return PARSER; 10120 } 10121 10122 private int bitField0_; 10123 // required bytes first = 1; 10124 public static final int FIRST_FIELD_NUMBER = 1; 10125 private com.google.protobuf.ByteString first_; 10126 /** 10127 * <code>required bytes first = 1;</code> 10128 */ hasFirst()10129 public boolean hasFirst() { 10130 return ((bitField0_ & 0x00000001) == 0x00000001); 10131 } 10132 /** 10133 * <code>required bytes first = 1;</code> 10134 */ getFirst()10135 public com.google.protobuf.ByteString getFirst() { 10136 return first_; 10137 } 10138 10139 // required bytes second = 2; 10140 public static final int SECOND_FIELD_NUMBER = 2; 10141 private com.google.protobuf.ByteString second_; 10142 /** 10143 * <code>required bytes second = 2;</code> 10144 */ hasSecond()10145 public boolean hasSecond() { 10146 return ((bitField0_ & 0x00000002) == 0x00000002); 10147 } 10148 /** 10149 * <code>required bytes second = 2;</code> 10150 */ getSecond()10151 public com.google.protobuf.ByteString getSecond() { 10152 return second_; 10153 } 10154 initFields()10155 private void initFields() { 10156 first_ = com.google.protobuf.ByteString.EMPTY; 10157 second_ = com.google.protobuf.ByteString.EMPTY; 10158 } 10159 private byte memoizedIsInitialized = -1; isInitialized()10160 public final boolean isInitialized() { 10161 byte isInitialized = memoizedIsInitialized; 10162 if (isInitialized != -1) return isInitialized == 1; 10163 10164 if (!hasFirst()) { 10165 memoizedIsInitialized = 0; 10166 return false; 10167 } 10168 if (!hasSecond()) { 10169 memoizedIsInitialized = 0; 10170 return false; 10171 } 10172 memoizedIsInitialized = 1; 10173 return true; 10174 } 10175 writeTo(com.google.protobuf.CodedOutputStream output)10176 public void writeTo(com.google.protobuf.CodedOutputStream output) 10177 throws java.io.IOException { 10178 getSerializedSize(); 10179 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10180 output.writeBytes(1, first_); 10181 } 10182 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10183 output.writeBytes(2, second_); 10184 } 10185 getUnknownFields().writeTo(output); 10186 } 10187 10188 private int memoizedSerializedSize = -1; getSerializedSize()10189 public int getSerializedSize() { 10190 int size = memoizedSerializedSize; 10191 if (size != -1) return size; 10192 10193 size = 0; 10194 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10195 size += com.google.protobuf.CodedOutputStream 10196 .computeBytesSize(1, first_); 10197 } 10198 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10199 size += com.google.protobuf.CodedOutputStream 10200 .computeBytesSize(2, second_); 10201 } 10202 size += getUnknownFields().getSerializedSize(); 10203 memoizedSerializedSize = size; 10204 return size; 10205 } 10206 10207 private static final long serialVersionUID = 0L; 10208 @java.lang.Override writeReplace()10209 protected java.lang.Object writeReplace() 10210 throws java.io.ObjectStreamException { 10211 return super.writeReplace(); 10212 } 10213 10214 @java.lang.Override equals(final java.lang.Object obj)10215 public boolean equals(final java.lang.Object obj) { 10216 if (obj == this) { 10217 return true; 10218 } 10219 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)) { 10220 return super.equals(obj); 10221 } 10222 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) obj; 10223 10224 boolean result = true; 10225 result = result && (hasFirst() == other.hasFirst()); 10226 if (hasFirst()) { 10227 result = result && getFirst() 10228 .equals(other.getFirst()); 10229 } 10230 result = result && (hasSecond() == other.hasSecond()); 10231 if (hasSecond()) { 10232 result = result && getSecond() 10233 .equals(other.getSecond()); 10234 } 10235 result = result && 10236 getUnknownFields().equals(other.getUnknownFields()); 10237 return result; 10238 } 10239 10240 private int memoizedHashCode = 0; 10241 @java.lang.Override hashCode()10242 public int hashCode() { 10243 if (memoizedHashCode != 0) { 10244 return memoizedHashCode; 10245 } 10246 int hash = 41; 10247 hash = (19 * hash) + getDescriptorForType().hashCode(); 10248 if (hasFirst()) { 10249 hash = (37 * hash) + FIRST_FIELD_NUMBER; 10250 hash = (53 * hash) + getFirst().hashCode(); 10251 } 10252 if (hasSecond()) { 10253 hash = (37 * hash) + SECOND_FIELD_NUMBER; 10254 hash = (53 * hash) + getSecond().hashCode(); 10255 } 10256 hash = (29 * hash) + getUnknownFields().hashCode(); 10257 memoizedHashCode = hash; 10258 return hash; 10259 } 10260 parseFrom( com.google.protobuf.ByteString data)10261 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( 10262 com.google.protobuf.ByteString data) 10263 throws com.google.protobuf.InvalidProtocolBufferException { 10264 return PARSER.parseFrom(data); 10265 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10266 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( 10267 com.google.protobuf.ByteString data, 10268 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10269 throws com.google.protobuf.InvalidProtocolBufferException { 10270 return PARSER.parseFrom(data, extensionRegistry); 10271 } parseFrom(byte[] data)10272 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(byte[] data) 10273 throws com.google.protobuf.InvalidProtocolBufferException { 10274 return PARSER.parseFrom(data); 10275 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10276 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( 10277 byte[] data, 10278 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10279 throws com.google.protobuf.InvalidProtocolBufferException { 10280 return PARSER.parseFrom(data, extensionRegistry); 10281 } parseFrom(java.io.InputStream input)10282 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom(java.io.InputStream input) 10283 throws java.io.IOException { 10284 return PARSER.parseFrom(input); 10285 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10286 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( 10287 java.io.InputStream input, 10288 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10289 throws java.io.IOException { 10290 return PARSER.parseFrom(input, extensionRegistry); 10291 } parseDelimitedFrom(java.io.InputStream input)10292 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom(java.io.InputStream input) 10293 throws java.io.IOException { 10294 return PARSER.parseDelimitedFrom(input); 10295 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10296 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseDelimitedFrom( 10297 java.io.InputStream input, 10298 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10299 throws java.io.IOException { 10300 return PARSER.parseDelimitedFrom(input, extensionRegistry); 10301 } parseFrom( com.google.protobuf.CodedInputStream input)10302 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( 10303 com.google.protobuf.CodedInputStream input) 10304 throws java.io.IOException { 10305 return PARSER.parseFrom(input); 10306 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10307 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parseFrom( 10308 com.google.protobuf.CodedInputStream input, 10309 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10310 throws java.io.IOException { 10311 return PARSER.parseFrom(input, extensionRegistry); 10312 } 10313 newBuilder()10314 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()10315 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair prototype)10316 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair prototype) { 10317 return newBuilder().mergeFrom(prototype); 10318 } toBuilder()10319 public Builder toBuilder() { return newBuilder(this); } 10320 10321 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)10322 protected Builder newBuilderForType( 10323 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10324 Builder builder = new Builder(parent); 10325 return builder; 10326 } 10327 /** 10328 * Protobuf type {@code BytesBytesPair} 10329 */ 10330 public static final class Builder extends 10331 com.google.protobuf.GeneratedMessage.Builder<Builder> 10332 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder { 10333 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10334 getDescriptor() { 10335 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; 10336 } 10337 10338 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10339 internalGetFieldAccessorTable() { 10340 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_fieldAccessorTable 10341 .ensureFieldAccessorsInitialized( 10342 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder.class); 10343 } 10344 10345 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.newBuilder() Builder()10346 private Builder() { 10347 maybeForceBuilderInitialization(); 10348 } 10349 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)10350 private Builder( 10351 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10352 super(parent); 10353 maybeForceBuilderInitialization(); 10354 } maybeForceBuilderInitialization()10355 private void maybeForceBuilderInitialization() { 10356 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 10357 } 10358 } create()10359 private static Builder create() { 10360 return new Builder(); 10361 } 10362 clear()10363 public Builder clear() { 10364 super.clear(); 10365 first_ = com.google.protobuf.ByteString.EMPTY; 10366 bitField0_ = (bitField0_ & ~0x00000001); 10367 second_ = com.google.protobuf.ByteString.EMPTY; 10368 bitField0_ = (bitField0_ & ~0x00000002); 10369 return this; 10370 } 10371 clone()10372 public Builder clone() { 10373 return create().mergeFrom(buildPartial()); 10374 } 10375 10376 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()10377 getDescriptorForType() { 10378 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_BytesBytesPair_descriptor; 10379 } 10380 getDefaultInstanceForType()10381 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getDefaultInstanceForType() { 10382 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance(); 10383 } 10384 build()10385 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair build() { 10386 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = buildPartial(); 10387 if (!result.isInitialized()) { 10388 throw newUninitializedMessageException(result); 10389 } 10390 return result; 10391 } 10392 buildPartial()10393 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair buildPartial() { 10394 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair(this); 10395 int from_bitField0_ = bitField0_; 10396 int to_bitField0_ = 0; 10397 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 10398 to_bitField0_ |= 0x00000001; 10399 } 10400 result.first_ = first_; 10401 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 10402 to_bitField0_ |= 0x00000002; 10403 } 10404 result.second_ = second_; 10405 result.bitField0_ = to_bitField0_; 10406 onBuilt(); 10407 return result; 10408 } 10409 mergeFrom(com.google.protobuf.Message other)10410 public Builder mergeFrom(com.google.protobuf.Message other) { 10411 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) { 10412 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair)other); 10413 } else { 10414 super.mergeFrom(other); 10415 return this; 10416 } 10417 } 10418 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other)10419 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair other) { 10420 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()) return this; 10421 if (other.hasFirst()) { 10422 setFirst(other.getFirst()); 10423 } 10424 if (other.hasSecond()) { 10425 setSecond(other.getSecond()); 10426 } 10427 this.mergeUnknownFields(other.getUnknownFields()); 10428 return this; 10429 } 10430 isInitialized()10431 public final boolean isInitialized() { 10432 if (!hasFirst()) { 10433 10434 return false; 10435 } 10436 if (!hasSecond()) { 10437 10438 return false; 10439 } 10440 return true; 10441 } 10442 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10443 public Builder mergeFrom( 10444 com.google.protobuf.CodedInputStream input, 10445 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10446 throws java.io.IOException { 10447 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair parsedMessage = null; 10448 try { 10449 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 10450 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10451 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair) e.getUnfinishedMessage(); 10452 throw e; 10453 } finally { 10454 if (parsedMessage != null) { 10455 mergeFrom(parsedMessage); 10456 } 10457 } 10458 return this; 10459 } 10460 private int bitField0_; 10461 10462 // required bytes first = 1; 10463 private com.google.protobuf.ByteString first_ = com.google.protobuf.ByteString.EMPTY; 10464 /** 10465 * <code>required bytes first = 1;</code> 10466 */ hasFirst()10467 public boolean hasFirst() { 10468 return ((bitField0_ & 0x00000001) == 0x00000001); 10469 } 10470 /** 10471 * <code>required bytes first = 1;</code> 10472 */ getFirst()10473 public com.google.protobuf.ByteString getFirst() { 10474 return first_; 10475 } 10476 /** 10477 * <code>required bytes first = 1;</code> 10478 */ setFirst(com.google.protobuf.ByteString value)10479 public Builder setFirst(com.google.protobuf.ByteString value) { 10480 if (value == null) { 10481 throw new NullPointerException(); 10482 } 10483 bitField0_ |= 0x00000001; 10484 first_ = value; 10485 onChanged(); 10486 return this; 10487 } 10488 /** 10489 * <code>required bytes first = 1;</code> 10490 */ clearFirst()10491 public Builder clearFirst() { 10492 bitField0_ = (bitField0_ & ~0x00000001); 10493 first_ = getDefaultInstance().getFirst(); 10494 onChanged(); 10495 return this; 10496 } 10497 10498 // required bytes second = 2; 10499 private com.google.protobuf.ByteString second_ = com.google.protobuf.ByteString.EMPTY; 10500 /** 10501 * <code>required bytes second = 2;</code> 10502 */ hasSecond()10503 public boolean hasSecond() { 10504 return ((bitField0_ & 0x00000002) == 0x00000002); 10505 } 10506 /** 10507 * <code>required bytes second = 2;</code> 10508 */ getSecond()10509 public com.google.protobuf.ByteString getSecond() { 10510 return second_; 10511 } 10512 /** 10513 * <code>required bytes second = 2;</code> 10514 */ setSecond(com.google.protobuf.ByteString value)10515 public Builder setSecond(com.google.protobuf.ByteString value) { 10516 if (value == null) { 10517 throw new NullPointerException(); 10518 } 10519 bitField0_ |= 0x00000002; 10520 second_ = value; 10521 onChanged(); 10522 return this; 10523 } 10524 /** 10525 * <code>required bytes second = 2;</code> 10526 */ clearSecond()10527 public Builder clearSecond() { 10528 bitField0_ = (bitField0_ & ~0x00000002); 10529 second_ = getDefaultInstance().getSecond(); 10530 onChanged(); 10531 return this; 10532 } 10533 10534 // @@protoc_insertion_point(builder_scope:BytesBytesPair) 10535 } 10536 10537 static { 10538 defaultInstance = new BytesBytesPair(true); defaultInstance.initFields()10539 defaultInstance.initFields(); 10540 } 10541 10542 // @@protoc_insertion_point(class_scope:BytesBytesPair) 10543 } 10544 10545 public interface NameInt64PairOrBuilder 10546 extends com.google.protobuf.MessageOrBuilder { 10547 10548 // optional string name = 1; 10549 /** 10550 * <code>optional string name = 1;</code> 10551 */ hasName()10552 boolean hasName(); 10553 /** 10554 * <code>optional string name = 1;</code> 10555 */ getName()10556 java.lang.String getName(); 10557 /** 10558 * <code>optional string name = 1;</code> 10559 */ 10560 com.google.protobuf.ByteString getNameBytes()10561 getNameBytes(); 10562 10563 // optional int64 value = 2; 10564 /** 10565 * <code>optional int64 value = 2;</code> 10566 */ hasValue()10567 boolean hasValue(); 10568 /** 10569 * <code>optional int64 value = 2;</code> 10570 */ getValue()10571 long getValue(); 10572 } 10573 /** 10574 * Protobuf type {@code NameInt64Pair} 10575 */ 10576 public static final class NameInt64Pair extends 10577 com.google.protobuf.GeneratedMessage 10578 implements NameInt64PairOrBuilder { 10579 // Use NameInt64Pair.newBuilder() to construct. NameInt64Pair(com.google.protobuf.GeneratedMessage.Builder<?> builder)10580 private NameInt64Pair(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 10581 super(builder); 10582 this.unknownFields = builder.getUnknownFields(); 10583 } NameInt64Pair(boolean noInit)10584 private NameInt64Pair(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 10585 10586 private static final NameInt64Pair defaultInstance; getDefaultInstance()10587 public static NameInt64Pair getDefaultInstance() { 10588 return defaultInstance; 10589 } 10590 getDefaultInstanceForType()10591 public NameInt64Pair getDefaultInstanceForType() { 10592 return defaultInstance; 10593 } 10594 10595 private final com.google.protobuf.UnknownFieldSet unknownFields; 10596 @java.lang.Override 10597 public final com.google.protobuf.UnknownFieldSet getUnknownFields()10598 getUnknownFields() { 10599 return this.unknownFields; 10600 } NameInt64Pair( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10601 private NameInt64Pair( 10602 com.google.protobuf.CodedInputStream input, 10603 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10604 throws com.google.protobuf.InvalidProtocolBufferException { 10605 initFields(); 10606 int mutable_bitField0_ = 0; 10607 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 10608 com.google.protobuf.UnknownFieldSet.newBuilder(); 10609 try { 10610 boolean done = false; 10611 while (!done) { 10612 int tag = input.readTag(); 10613 switch (tag) { 10614 case 0: 10615 done = true; 10616 break; 10617 default: { 10618 if (!parseUnknownField(input, unknownFields, 10619 extensionRegistry, tag)) { 10620 done = true; 10621 } 10622 break; 10623 } 10624 case 10: { 10625 bitField0_ |= 0x00000001; 10626 name_ = input.readBytes(); 10627 break; 10628 } 10629 case 16: { 10630 bitField0_ |= 0x00000002; 10631 value_ = input.readInt64(); 10632 break; 10633 } 10634 } 10635 } 10636 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10637 throw e.setUnfinishedMessage(this); 10638 } catch (java.io.IOException e) { 10639 throw new com.google.protobuf.InvalidProtocolBufferException( 10640 e.getMessage()).setUnfinishedMessage(this); 10641 } finally { 10642 this.unknownFields = unknownFields.build(); 10643 makeExtensionsImmutable(); 10644 } 10645 } 10646 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10647 getDescriptor() { 10648 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_descriptor; 10649 } 10650 10651 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10652 internalGetFieldAccessorTable() { 10653 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_fieldAccessorTable 10654 .ensureFieldAccessorsInitialized( 10655 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); 10656 } 10657 10658 public static com.google.protobuf.Parser<NameInt64Pair> PARSER = 10659 new com.google.protobuf.AbstractParser<NameInt64Pair>() { 10660 public NameInt64Pair parsePartialFrom( 10661 com.google.protobuf.CodedInputStream input, 10662 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10663 throws com.google.protobuf.InvalidProtocolBufferException { 10664 return new NameInt64Pair(input, extensionRegistry); 10665 } 10666 }; 10667 10668 @java.lang.Override getParserForType()10669 public com.google.protobuf.Parser<NameInt64Pair> getParserForType() { 10670 return PARSER; 10671 } 10672 10673 private int bitField0_; 10674 // optional string name = 1; 10675 public static final int NAME_FIELD_NUMBER = 1; 10676 private java.lang.Object name_; 10677 /** 10678 * <code>optional string name = 1;</code> 10679 */ hasName()10680 public boolean hasName() { 10681 return ((bitField0_ & 0x00000001) == 0x00000001); 10682 } 10683 /** 10684 * <code>optional string name = 1;</code> 10685 */ getName()10686 public java.lang.String getName() { 10687 java.lang.Object ref = name_; 10688 if (ref instanceof java.lang.String) { 10689 return (java.lang.String) ref; 10690 } else { 10691 com.google.protobuf.ByteString bs = 10692 (com.google.protobuf.ByteString) ref; 10693 java.lang.String s = bs.toStringUtf8(); 10694 if (bs.isValidUtf8()) { 10695 name_ = s; 10696 } 10697 return s; 10698 } 10699 } 10700 /** 10701 * <code>optional string name = 1;</code> 10702 */ 10703 public com.google.protobuf.ByteString getNameBytes()10704 getNameBytes() { 10705 java.lang.Object ref = name_; 10706 if (ref instanceof java.lang.String) { 10707 com.google.protobuf.ByteString b = 10708 com.google.protobuf.ByteString.copyFromUtf8( 10709 (java.lang.String) ref); 10710 name_ = b; 10711 return b; 10712 } else { 10713 return (com.google.protobuf.ByteString) ref; 10714 } 10715 } 10716 10717 // optional int64 value = 2; 10718 public static final int VALUE_FIELD_NUMBER = 2; 10719 private long value_; 10720 /** 10721 * <code>optional int64 value = 2;</code> 10722 */ hasValue()10723 public boolean hasValue() { 10724 return ((bitField0_ & 0x00000002) == 0x00000002); 10725 } 10726 /** 10727 * <code>optional int64 value = 2;</code> 10728 */ getValue()10729 public long getValue() { 10730 return value_; 10731 } 10732 initFields()10733 private void initFields() { 10734 name_ = ""; 10735 value_ = 0L; 10736 } 10737 private byte memoizedIsInitialized = -1; isInitialized()10738 public final boolean isInitialized() { 10739 byte isInitialized = memoizedIsInitialized; 10740 if (isInitialized != -1) return isInitialized == 1; 10741 10742 memoizedIsInitialized = 1; 10743 return true; 10744 } 10745 writeTo(com.google.protobuf.CodedOutputStream output)10746 public void writeTo(com.google.protobuf.CodedOutputStream output) 10747 throws java.io.IOException { 10748 getSerializedSize(); 10749 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10750 output.writeBytes(1, getNameBytes()); 10751 } 10752 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10753 output.writeInt64(2, value_); 10754 } 10755 getUnknownFields().writeTo(output); 10756 } 10757 10758 private int memoizedSerializedSize = -1; getSerializedSize()10759 public int getSerializedSize() { 10760 int size = memoizedSerializedSize; 10761 if (size != -1) return size; 10762 10763 size = 0; 10764 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10765 size += com.google.protobuf.CodedOutputStream 10766 .computeBytesSize(1, getNameBytes()); 10767 } 10768 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10769 size += com.google.protobuf.CodedOutputStream 10770 .computeInt64Size(2, value_); 10771 } 10772 size += getUnknownFields().getSerializedSize(); 10773 memoizedSerializedSize = size; 10774 return size; 10775 } 10776 10777 private static final long serialVersionUID = 0L; 10778 @java.lang.Override writeReplace()10779 protected java.lang.Object writeReplace() 10780 throws java.io.ObjectStreamException { 10781 return super.writeReplace(); 10782 } 10783 10784 @java.lang.Override equals(final java.lang.Object obj)10785 public boolean equals(final java.lang.Object obj) { 10786 if (obj == this) { 10787 return true; 10788 } 10789 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair)) { 10790 return super.equals(obj); 10791 } 10792 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) obj; 10793 10794 boolean result = true; 10795 result = result && (hasName() == other.hasName()); 10796 if (hasName()) { 10797 result = result && getName() 10798 .equals(other.getName()); 10799 } 10800 result = result && (hasValue() == other.hasValue()); 10801 if (hasValue()) { 10802 result = result && (getValue() 10803 == other.getValue()); 10804 } 10805 result = result && 10806 getUnknownFields().equals(other.getUnknownFields()); 10807 return result; 10808 } 10809 10810 private int memoizedHashCode = 0; 10811 @java.lang.Override hashCode()10812 public int hashCode() { 10813 if (memoizedHashCode != 0) { 10814 return memoizedHashCode; 10815 } 10816 int hash = 41; 10817 hash = (19 * hash) + getDescriptorForType().hashCode(); 10818 if (hasName()) { 10819 hash = (37 * hash) + NAME_FIELD_NUMBER; 10820 hash = (53 * hash) + getName().hashCode(); 10821 } 10822 if (hasValue()) { 10823 hash = (37 * hash) + VALUE_FIELD_NUMBER; 10824 hash = (53 * hash) + hashLong(getValue()); 10825 } 10826 hash = (29 * hash) + getUnknownFields().hashCode(); 10827 memoizedHashCode = hash; 10828 return hash; 10829 } 10830 parseFrom( com.google.protobuf.ByteString data)10831 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( 10832 com.google.protobuf.ByteString data) 10833 throws com.google.protobuf.InvalidProtocolBufferException { 10834 return PARSER.parseFrom(data); 10835 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10836 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( 10837 com.google.protobuf.ByteString data, 10838 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10839 throws com.google.protobuf.InvalidProtocolBufferException { 10840 return PARSER.parseFrom(data, extensionRegistry); 10841 } parseFrom(byte[] data)10842 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(byte[] data) 10843 throws com.google.protobuf.InvalidProtocolBufferException { 10844 return PARSER.parseFrom(data); 10845 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10846 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( 10847 byte[] data, 10848 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10849 throws com.google.protobuf.InvalidProtocolBufferException { 10850 return PARSER.parseFrom(data, extensionRegistry); 10851 } parseFrom(java.io.InputStream input)10852 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom(java.io.InputStream input) 10853 throws java.io.IOException { 10854 return PARSER.parseFrom(input); 10855 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10856 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( 10857 java.io.InputStream input, 10858 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10859 throws java.io.IOException { 10860 return PARSER.parseFrom(input, extensionRegistry); 10861 } parseDelimitedFrom(java.io.InputStream input)10862 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom(java.io.InputStream input) 10863 throws java.io.IOException { 10864 return PARSER.parseDelimitedFrom(input); 10865 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10866 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseDelimitedFrom( 10867 java.io.InputStream input, 10868 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10869 throws java.io.IOException { 10870 return PARSER.parseDelimitedFrom(input, extensionRegistry); 10871 } parseFrom( com.google.protobuf.CodedInputStream input)10872 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( 10873 com.google.protobuf.CodedInputStream input) 10874 throws java.io.IOException { 10875 return PARSER.parseFrom(input); 10876 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10877 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parseFrom( 10878 com.google.protobuf.CodedInputStream input, 10879 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10880 throws java.io.IOException { 10881 return PARSER.parseFrom(input, extensionRegistry); 10882 } 10883 newBuilder()10884 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()10885 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair prototype)10886 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair prototype) { 10887 return newBuilder().mergeFrom(prototype); 10888 } toBuilder()10889 public Builder toBuilder() { return newBuilder(this); } 10890 10891 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)10892 protected Builder newBuilderForType( 10893 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10894 Builder builder = new Builder(parent); 10895 return builder; 10896 } 10897 /** 10898 * Protobuf type {@code NameInt64Pair} 10899 */ 10900 public static final class Builder extends 10901 com.google.protobuf.GeneratedMessage.Builder<Builder> 10902 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder { 10903 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10904 getDescriptor() { 10905 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_descriptor; 10906 } 10907 10908 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10909 internalGetFieldAccessorTable() { 10910 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_fieldAccessorTable 10911 .ensureFieldAccessorsInitialized( 10912 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder.class); 10913 } 10914 10915 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder() Builder()10916 private Builder() { 10917 maybeForceBuilderInitialization(); 10918 } 10919 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)10920 private Builder( 10921 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10922 super(parent); 10923 maybeForceBuilderInitialization(); 10924 } maybeForceBuilderInitialization()10925 private void maybeForceBuilderInitialization() { 10926 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 10927 } 10928 } create()10929 private static Builder create() { 10930 return new Builder(); 10931 } 10932 clear()10933 public Builder clear() { 10934 super.clear(); 10935 name_ = ""; 10936 bitField0_ = (bitField0_ & ~0x00000001); 10937 value_ = 0L; 10938 bitField0_ = (bitField0_ & ~0x00000002); 10939 return this; 10940 } 10941 clone()10942 public Builder clone() { 10943 return create().mergeFrom(buildPartial()); 10944 } 10945 10946 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()10947 getDescriptorForType() { 10948 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_NameInt64Pair_descriptor; 10949 } 10950 getDefaultInstanceForType()10951 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getDefaultInstanceForType() { 10952 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance(); 10953 } 10954 build()10955 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair build() { 10956 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = buildPartial(); 10957 if (!result.isInitialized()) { 10958 throw newUninitializedMessageException(result); 10959 } 10960 return result; 10961 } 10962 buildPartial()10963 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair buildPartial() { 10964 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair(this); 10965 int from_bitField0_ = bitField0_; 10966 int to_bitField0_ = 0; 10967 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 10968 to_bitField0_ |= 0x00000001; 10969 } 10970 result.name_ = name_; 10971 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 10972 to_bitField0_ |= 0x00000002; 10973 } 10974 result.value_ = value_; 10975 result.bitField0_ = to_bitField0_; 10976 onBuilt(); 10977 return result; 10978 } 10979 mergeFrom(com.google.protobuf.Message other)10980 public Builder mergeFrom(com.google.protobuf.Message other) { 10981 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) { 10982 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair)other); 10983 } else { 10984 super.mergeFrom(other); 10985 return this; 10986 } 10987 } 10988 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other)10989 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair other) { 10990 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance()) return this; 10991 if (other.hasName()) { 10992 bitField0_ |= 0x00000001; 10993 name_ = other.name_; 10994 onChanged(); 10995 } 10996 if (other.hasValue()) { 10997 setValue(other.getValue()); 10998 } 10999 this.mergeUnknownFields(other.getUnknownFields()); 11000 return this; 11001 } 11002 isInitialized()11003 public final boolean isInitialized() { 11004 return true; 11005 } 11006 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11007 public Builder mergeFrom( 11008 com.google.protobuf.CodedInputStream input, 11009 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11010 throws java.io.IOException { 11011 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair parsedMessage = null; 11012 try { 11013 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 11014 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 11015 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair) e.getUnfinishedMessage(); 11016 throw e; 11017 } finally { 11018 if (parsedMessage != null) { 11019 mergeFrom(parsedMessage); 11020 } 11021 } 11022 return this; 11023 } 11024 private int bitField0_; 11025 11026 // optional string name = 1; 11027 private java.lang.Object name_ = ""; 11028 /** 11029 * <code>optional string name = 1;</code> 11030 */ hasName()11031 public boolean hasName() { 11032 return ((bitField0_ & 0x00000001) == 0x00000001); 11033 } 11034 /** 11035 * <code>optional string name = 1;</code> 11036 */ getName()11037 public java.lang.String getName() { 11038 java.lang.Object ref = name_; 11039 if (!(ref instanceof java.lang.String)) { 11040 java.lang.String s = ((com.google.protobuf.ByteString) ref) 11041 .toStringUtf8(); 11042 name_ = s; 11043 return s; 11044 } else { 11045 return (java.lang.String) ref; 11046 } 11047 } 11048 /** 11049 * <code>optional string name = 1;</code> 11050 */ 11051 public com.google.protobuf.ByteString getNameBytes()11052 getNameBytes() { 11053 java.lang.Object ref = name_; 11054 if (ref instanceof String) { 11055 com.google.protobuf.ByteString b = 11056 com.google.protobuf.ByteString.copyFromUtf8( 11057 (java.lang.String) ref); 11058 name_ = b; 11059 return b; 11060 } else { 11061 return (com.google.protobuf.ByteString) ref; 11062 } 11063 } 11064 /** 11065 * <code>optional string name = 1;</code> 11066 */ setName( java.lang.String value)11067 public Builder setName( 11068 java.lang.String value) { 11069 if (value == null) { 11070 throw new NullPointerException(); 11071 } 11072 bitField0_ |= 0x00000001; 11073 name_ = value; 11074 onChanged(); 11075 return this; 11076 } 11077 /** 11078 * <code>optional string name = 1;</code> 11079 */ clearName()11080 public Builder clearName() { 11081 bitField0_ = (bitField0_ & ~0x00000001); 11082 name_ = getDefaultInstance().getName(); 11083 onChanged(); 11084 return this; 11085 } 11086 /** 11087 * <code>optional string name = 1;</code> 11088 */ setNameBytes( com.google.protobuf.ByteString value)11089 public Builder setNameBytes( 11090 com.google.protobuf.ByteString value) { 11091 if (value == null) { 11092 throw new NullPointerException(); 11093 } 11094 bitField0_ |= 0x00000001; 11095 name_ = value; 11096 onChanged(); 11097 return this; 11098 } 11099 11100 // optional int64 value = 2; 11101 private long value_ ; 11102 /** 11103 * <code>optional int64 value = 2;</code> 11104 */ hasValue()11105 public boolean hasValue() { 11106 return ((bitField0_ & 0x00000002) == 0x00000002); 11107 } 11108 /** 11109 * <code>optional int64 value = 2;</code> 11110 */ getValue()11111 public long getValue() { 11112 return value_; 11113 } 11114 /** 11115 * <code>optional int64 value = 2;</code> 11116 */ setValue(long value)11117 public Builder setValue(long value) { 11118 bitField0_ |= 0x00000002; 11119 value_ = value; 11120 onChanged(); 11121 return this; 11122 } 11123 /** 11124 * <code>optional int64 value = 2;</code> 11125 */ clearValue()11126 public Builder clearValue() { 11127 bitField0_ = (bitField0_ & ~0x00000002); 11128 value_ = 0L; 11129 onChanged(); 11130 return this; 11131 } 11132 11133 // @@protoc_insertion_point(builder_scope:NameInt64Pair) 11134 } 11135 11136 static { 11137 defaultInstance = new NameInt64Pair(true); defaultInstance.initFields()11138 defaultInstance.initFields(); 11139 } 11140 11141 // @@protoc_insertion_point(class_scope:NameInt64Pair) 11142 } 11143 11144 public interface SnapshotDescriptionOrBuilder 11145 extends com.google.protobuf.MessageOrBuilder { 11146 11147 // required string name = 1; 11148 /** 11149 * <code>required string name = 1;</code> 11150 */ hasName()11151 boolean hasName(); 11152 /** 11153 * <code>required string name = 1;</code> 11154 */ getName()11155 java.lang.String getName(); 11156 /** 11157 * <code>required string name = 1;</code> 11158 */ 11159 com.google.protobuf.ByteString getNameBytes()11160 getNameBytes(); 11161 11162 // optional string table = 2; 11163 /** 11164 * <code>optional string table = 2;</code> 11165 * 11166 * <pre> 11167 * not needed for delete, but checked for in taking snapshot 11168 * </pre> 11169 */ hasTable()11170 boolean hasTable(); 11171 /** 11172 * <code>optional string table = 2;</code> 11173 * 11174 * <pre> 11175 * not needed for delete, but checked for in taking snapshot 11176 * </pre> 11177 */ getTable()11178 java.lang.String getTable(); 11179 /** 11180 * <code>optional string table = 2;</code> 11181 * 11182 * <pre> 11183 * not needed for delete, but checked for in taking snapshot 11184 * </pre> 11185 */ 11186 com.google.protobuf.ByteString getTableBytes()11187 getTableBytes(); 11188 11189 // optional int64 creation_time = 3 [default = 0]; 11190 /** 11191 * <code>optional int64 creation_time = 3 [default = 0];</code> 11192 */ hasCreationTime()11193 boolean hasCreationTime(); 11194 /** 11195 * <code>optional int64 creation_time = 3 [default = 0];</code> 11196 */ getCreationTime()11197 long getCreationTime(); 11198 11199 // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; 11200 /** 11201 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 11202 */ hasType()11203 boolean hasType(); 11204 /** 11205 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 11206 */ getType()11207 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType(); 11208 11209 // optional int32 version = 5; 11210 /** 11211 * <code>optional int32 version = 5;</code> 11212 */ hasVersion()11213 boolean hasVersion(); 11214 /** 11215 * <code>optional int32 version = 5;</code> 11216 */ getVersion()11217 int getVersion(); 11218 11219 // optional string owner = 6; 11220 /** 11221 * <code>optional string owner = 6;</code> 11222 */ hasOwner()11223 boolean hasOwner(); 11224 /** 11225 * <code>optional string owner = 6;</code> 11226 */ getOwner()11227 java.lang.String getOwner(); 11228 /** 11229 * <code>optional string owner = 6;</code> 11230 */ 11231 com.google.protobuf.ByteString getOwnerBytes()11232 getOwnerBytes(); 11233 } 11234 /** 11235 * Protobuf type {@code SnapshotDescription} 11236 * 11237 * <pre> 11238 ** 11239 * Description of the snapshot to take 11240 * </pre> 11241 */ 11242 public static final class SnapshotDescription extends 11243 com.google.protobuf.GeneratedMessage 11244 implements SnapshotDescriptionOrBuilder { 11245 // Use SnapshotDescription.newBuilder() to construct. SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder)11246 private SnapshotDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 11247 super(builder); 11248 this.unknownFields = builder.getUnknownFields(); 11249 } SnapshotDescription(boolean noInit)11250 private SnapshotDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 11251 11252 private static final SnapshotDescription defaultInstance; getDefaultInstance()11253 public static SnapshotDescription getDefaultInstance() { 11254 return defaultInstance; 11255 } 11256 getDefaultInstanceForType()11257 public SnapshotDescription getDefaultInstanceForType() { 11258 return defaultInstance; 11259 } 11260 11261 private final com.google.protobuf.UnknownFieldSet unknownFields; 11262 @java.lang.Override 11263 public final com.google.protobuf.UnknownFieldSet getUnknownFields()11264 getUnknownFields() { 11265 return this.unknownFields; 11266 } SnapshotDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11267 private SnapshotDescription( 11268 com.google.protobuf.CodedInputStream input, 11269 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11270 throws com.google.protobuf.InvalidProtocolBufferException { 11271 initFields(); 11272 int mutable_bitField0_ = 0; 11273 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 11274 com.google.protobuf.UnknownFieldSet.newBuilder(); 11275 try { 11276 boolean done = false; 11277 while (!done) { 11278 int tag = input.readTag(); 11279 switch (tag) { 11280 case 0: 11281 done = true; 11282 break; 11283 default: { 11284 if (!parseUnknownField(input, unknownFields, 11285 extensionRegistry, tag)) { 11286 done = true; 11287 } 11288 break; 11289 } 11290 case 10: { 11291 bitField0_ |= 0x00000001; 11292 name_ = input.readBytes(); 11293 break; 11294 } 11295 case 18: { 11296 bitField0_ |= 0x00000002; 11297 table_ = input.readBytes(); 11298 break; 11299 } 11300 case 24: { 11301 bitField0_ |= 0x00000004; 11302 creationTime_ = input.readInt64(); 11303 break; 11304 } 11305 case 32: { 11306 int rawValue = input.readEnum(); 11307 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.valueOf(rawValue); 11308 if (value == null) { 11309 unknownFields.mergeVarintField(4, rawValue); 11310 } else { 11311 bitField0_ |= 0x00000008; 11312 type_ = value; 11313 } 11314 break; 11315 } 11316 case 40: { 11317 bitField0_ |= 0x00000010; 11318 version_ = input.readInt32(); 11319 break; 11320 } 11321 case 50: { 11322 bitField0_ |= 0x00000020; 11323 owner_ = input.readBytes(); 11324 break; 11325 } 11326 } 11327 } 11328 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 11329 throw e.setUnfinishedMessage(this); 11330 } catch (java.io.IOException e) { 11331 throw new com.google.protobuf.InvalidProtocolBufferException( 11332 e.getMessage()).setUnfinishedMessage(this); 11333 } finally { 11334 this.unknownFields = unknownFields.build(); 11335 makeExtensionsImmutable(); 11336 } 11337 } 11338 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()11339 getDescriptor() { 11340 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor; 11341 } 11342 11343 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()11344 internalGetFieldAccessorTable() { 11345 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable 11346 .ensureFieldAccessorsInitialized( 11347 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); 11348 } 11349 11350 public static com.google.protobuf.Parser<SnapshotDescription> PARSER = 11351 new com.google.protobuf.AbstractParser<SnapshotDescription>() { 11352 public SnapshotDescription parsePartialFrom( 11353 com.google.protobuf.CodedInputStream input, 11354 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11355 throws com.google.protobuf.InvalidProtocolBufferException { 11356 return new SnapshotDescription(input, extensionRegistry); 11357 } 11358 }; 11359 11360 @java.lang.Override getParserForType()11361 public com.google.protobuf.Parser<SnapshotDescription> getParserForType() { 11362 return PARSER; 11363 } 11364 11365 /** 11366 * Protobuf enum {@code SnapshotDescription.Type} 11367 */ 11368 public enum Type 11369 implements com.google.protobuf.ProtocolMessageEnum { 11370 /** 11371 * <code>DISABLED = 0;</code> 11372 */ 11373 DISABLED(0, 0), 11374 /** 11375 * <code>FLUSH = 1;</code> 11376 */ 11377 FLUSH(1, 1), 11378 /** 11379 * <code>SKIPFLUSH = 2;</code> 11380 */ 11381 SKIPFLUSH(2, 2), 11382 ; 11383 11384 /** 11385 * <code>DISABLED = 0;</code> 11386 */ 11387 public static final int DISABLED_VALUE = 0; 11388 /** 11389 * <code>FLUSH = 1;</code> 11390 */ 11391 public static final int FLUSH_VALUE = 1; 11392 /** 11393 * <code>SKIPFLUSH = 2;</code> 11394 */ 11395 public static final int SKIPFLUSH_VALUE = 2; 11396 11397 getNumber()11398 public final int getNumber() { return value; } 11399 valueOf(int value)11400 public static Type valueOf(int value) { 11401 switch (value) { 11402 case 0: return DISABLED; 11403 case 1: return FLUSH; 11404 case 2: return SKIPFLUSH; 11405 default: return null; 11406 } 11407 } 11408 11409 public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap()11410 internalGetValueMap() { 11411 return internalValueMap; 11412 } 11413 private static com.google.protobuf.Internal.EnumLiteMap<Type> 11414 internalValueMap = 11415 new com.google.protobuf.Internal.EnumLiteMap<Type>() { 11416 public Type findValueByNumber(int number) { 11417 return Type.valueOf(number); 11418 } 11419 }; 11420 11421 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()11422 getValueDescriptor() { 11423 return getDescriptor().getValues().get(index); 11424 } 11425 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()11426 getDescriptorForType() { 11427 return getDescriptor(); 11428 } 11429 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()11430 getDescriptor() { 11431 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDescriptor().getEnumTypes().get(0); 11432 } 11433 11434 private static final Type[] VALUES = values(); 11435 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)11436 public static Type valueOf( 11437 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 11438 if (desc.getType() != getDescriptor()) { 11439 throw new java.lang.IllegalArgumentException( 11440 "EnumValueDescriptor is not for this type."); 11441 } 11442 return VALUES[desc.getIndex()]; 11443 } 11444 11445 private final int index; 11446 private final int value; 11447 Type(int index, int value)11448 private Type(int index, int value) { 11449 this.index = index; 11450 this.value = value; 11451 } 11452 11453 // @@protoc_insertion_point(enum_scope:SnapshotDescription.Type) 11454 } 11455 11456 private int bitField0_; 11457 // required string name = 1; 11458 public static final int NAME_FIELD_NUMBER = 1; 11459 private java.lang.Object name_; 11460 /** 11461 * <code>required string name = 1;</code> 11462 */ hasName()11463 public boolean hasName() { 11464 return ((bitField0_ & 0x00000001) == 0x00000001); 11465 } 11466 /** 11467 * <code>required string name = 1;</code> 11468 */ getName()11469 public java.lang.String getName() { 11470 java.lang.Object ref = name_; 11471 if (ref instanceof java.lang.String) { 11472 return (java.lang.String) ref; 11473 } else { 11474 com.google.protobuf.ByteString bs = 11475 (com.google.protobuf.ByteString) ref; 11476 java.lang.String s = bs.toStringUtf8(); 11477 if (bs.isValidUtf8()) { 11478 name_ = s; 11479 } 11480 return s; 11481 } 11482 } 11483 /** 11484 * <code>required string name = 1;</code> 11485 */ 11486 public com.google.protobuf.ByteString getNameBytes()11487 getNameBytes() { 11488 java.lang.Object ref = name_; 11489 if (ref instanceof java.lang.String) { 11490 com.google.protobuf.ByteString b = 11491 com.google.protobuf.ByteString.copyFromUtf8( 11492 (java.lang.String) ref); 11493 name_ = b; 11494 return b; 11495 } else { 11496 return (com.google.protobuf.ByteString) ref; 11497 } 11498 } 11499 11500 // optional string table = 2; 11501 public static final int TABLE_FIELD_NUMBER = 2; 11502 private java.lang.Object table_; 11503 /** 11504 * <code>optional string table = 2;</code> 11505 * 11506 * <pre> 11507 * not needed for delete, but checked for in taking snapshot 11508 * </pre> 11509 */ hasTable()11510 public boolean hasTable() { 11511 return ((bitField0_ & 0x00000002) == 0x00000002); 11512 } 11513 /** 11514 * <code>optional string table = 2;</code> 11515 * 11516 * <pre> 11517 * not needed for delete, but checked for in taking snapshot 11518 * </pre> 11519 */ getTable()11520 public java.lang.String getTable() { 11521 java.lang.Object ref = table_; 11522 if (ref instanceof java.lang.String) { 11523 return (java.lang.String) ref; 11524 } else { 11525 com.google.protobuf.ByteString bs = 11526 (com.google.protobuf.ByteString) ref; 11527 java.lang.String s = bs.toStringUtf8(); 11528 if (bs.isValidUtf8()) { 11529 table_ = s; 11530 } 11531 return s; 11532 } 11533 } 11534 /** 11535 * <code>optional string table = 2;</code> 11536 * 11537 * <pre> 11538 * not needed for delete, but checked for in taking snapshot 11539 * </pre> 11540 */ 11541 public com.google.protobuf.ByteString getTableBytes()11542 getTableBytes() { 11543 java.lang.Object ref = table_; 11544 if (ref instanceof java.lang.String) { 11545 com.google.protobuf.ByteString b = 11546 com.google.protobuf.ByteString.copyFromUtf8( 11547 (java.lang.String) ref); 11548 table_ = b; 11549 return b; 11550 } else { 11551 return (com.google.protobuf.ByteString) ref; 11552 } 11553 } 11554 11555 // optional int64 creation_time = 3 [default = 0]; 11556 public static final int CREATION_TIME_FIELD_NUMBER = 3; 11557 private long creationTime_; 11558 /** 11559 * <code>optional int64 creation_time = 3 [default = 0];</code> 11560 */ hasCreationTime()11561 public boolean hasCreationTime() { 11562 return ((bitField0_ & 0x00000004) == 0x00000004); 11563 } 11564 /** 11565 * <code>optional int64 creation_time = 3 [default = 0];</code> 11566 */ getCreationTime()11567 public long getCreationTime() { 11568 return creationTime_; 11569 } 11570 11571 // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; 11572 public static final int TYPE_FIELD_NUMBER = 4; 11573 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_; 11574 /** 11575 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 11576 */ hasType()11577 public boolean hasType() { 11578 return ((bitField0_ & 0x00000008) == 0x00000008); 11579 } 11580 /** 11581 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 11582 */ getType()11583 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { 11584 return type_; 11585 } 11586 11587 // optional int32 version = 5; 11588 public static final int VERSION_FIELD_NUMBER = 5; 11589 private int version_; 11590 /** 11591 * <code>optional int32 version = 5;</code> 11592 */ hasVersion()11593 public boolean hasVersion() { 11594 return ((bitField0_ & 0x00000010) == 0x00000010); 11595 } 11596 /** 11597 * <code>optional int32 version = 5;</code> 11598 */ getVersion()11599 public int getVersion() { 11600 return version_; 11601 } 11602 11603 // optional string owner = 6; 11604 public static final int OWNER_FIELD_NUMBER = 6; 11605 private java.lang.Object owner_; 11606 /** 11607 * <code>optional string owner = 6;</code> 11608 */ hasOwner()11609 public boolean hasOwner() { 11610 return ((bitField0_ & 0x00000020) == 0x00000020); 11611 } 11612 /** 11613 * <code>optional string owner = 6;</code> 11614 */ getOwner()11615 public java.lang.String getOwner() { 11616 java.lang.Object ref = owner_; 11617 if (ref instanceof java.lang.String) { 11618 return (java.lang.String) ref; 11619 } else { 11620 com.google.protobuf.ByteString bs = 11621 (com.google.protobuf.ByteString) ref; 11622 java.lang.String s = bs.toStringUtf8(); 11623 if (bs.isValidUtf8()) { 11624 owner_ = s; 11625 } 11626 return s; 11627 } 11628 } 11629 /** 11630 * <code>optional string owner = 6;</code> 11631 */ 11632 public com.google.protobuf.ByteString getOwnerBytes()11633 getOwnerBytes() { 11634 java.lang.Object ref = owner_; 11635 if (ref instanceof java.lang.String) { 11636 com.google.protobuf.ByteString b = 11637 com.google.protobuf.ByteString.copyFromUtf8( 11638 (java.lang.String) ref); 11639 owner_ = b; 11640 return b; 11641 } else { 11642 return (com.google.protobuf.ByteString) ref; 11643 } 11644 } 11645 initFields()11646 private void initFields() { 11647 name_ = ""; 11648 table_ = ""; 11649 creationTime_ = 0L; 11650 type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; 11651 version_ = 0; 11652 owner_ = ""; 11653 } 11654 private byte memoizedIsInitialized = -1; isInitialized()11655 public final boolean isInitialized() { 11656 byte isInitialized = memoizedIsInitialized; 11657 if (isInitialized != -1) return isInitialized == 1; 11658 11659 if (!hasName()) { 11660 memoizedIsInitialized = 0; 11661 return false; 11662 } 11663 memoizedIsInitialized = 1; 11664 return true; 11665 } 11666 writeTo(com.google.protobuf.CodedOutputStream output)11667 public void writeTo(com.google.protobuf.CodedOutputStream output) 11668 throws java.io.IOException { 11669 getSerializedSize(); 11670 if (((bitField0_ & 0x00000001) == 0x00000001)) { 11671 output.writeBytes(1, getNameBytes()); 11672 } 11673 if (((bitField0_ & 0x00000002) == 0x00000002)) { 11674 output.writeBytes(2, getTableBytes()); 11675 } 11676 if (((bitField0_ & 0x00000004) == 0x00000004)) { 11677 output.writeInt64(3, creationTime_); 11678 } 11679 if (((bitField0_ & 0x00000008) == 0x00000008)) { 11680 output.writeEnum(4, type_.getNumber()); 11681 } 11682 if (((bitField0_ & 0x00000010) == 0x00000010)) { 11683 output.writeInt32(5, version_); 11684 } 11685 if (((bitField0_ & 0x00000020) == 0x00000020)) { 11686 output.writeBytes(6, getOwnerBytes()); 11687 } 11688 getUnknownFields().writeTo(output); 11689 } 11690 11691 private int memoizedSerializedSize = -1; getSerializedSize()11692 public int getSerializedSize() { 11693 int size = memoizedSerializedSize; 11694 if (size != -1) return size; 11695 11696 size = 0; 11697 if (((bitField0_ & 0x00000001) == 0x00000001)) { 11698 size += com.google.protobuf.CodedOutputStream 11699 .computeBytesSize(1, getNameBytes()); 11700 } 11701 if (((bitField0_ & 0x00000002) == 0x00000002)) { 11702 size += com.google.protobuf.CodedOutputStream 11703 .computeBytesSize(2, getTableBytes()); 11704 } 11705 if (((bitField0_ & 0x00000004) == 0x00000004)) { 11706 size += com.google.protobuf.CodedOutputStream 11707 .computeInt64Size(3, creationTime_); 11708 } 11709 if (((bitField0_ & 0x00000008) == 0x00000008)) { 11710 size += com.google.protobuf.CodedOutputStream 11711 .computeEnumSize(4, type_.getNumber()); 11712 } 11713 if (((bitField0_ & 0x00000010) == 0x00000010)) { 11714 size += com.google.protobuf.CodedOutputStream 11715 .computeInt32Size(5, version_); 11716 } 11717 if (((bitField0_ & 0x00000020) == 0x00000020)) { 11718 size += com.google.protobuf.CodedOutputStream 11719 .computeBytesSize(6, getOwnerBytes()); 11720 } 11721 size += getUnknownFields().getSerializedSize(); 11722 memoizedSerializedSize = size; 11723 return size; 11724 } 11725 11726 private static final long serialVersionUID = 0L; 11727 @java.lang.Override writeReplace()11728 protected java.lang.Object writeReplace() 11729 throws java.io.ObjectStreamException { 11730 return super.writeReplace(); 11731 } 11732 11733 @java.lang.Override equals(final java.lang.Object obj)11734 public boolean equals(final java.lang.Object obj) { 11735 if (obj == this) { 11736 return true; 11737 } 11738 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)) { 11739 return super.equals(obj); 11740 } 11741 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) obj; 11742 11743 boolean result = true; 11744 result = result && (hasName() == other.hasName()); 11745 if (hasName()) { 11746 result = result && getName() 11747 .equals(other.getName()); 11748 } 11749 result = result && (hasTable() == other.hasTable()); 11750 if (hasTable()) { 11751 result = result && getTable() 11752 .equals(other.getTable()); 11753 } 11754 result = result && (hasCreationTime() == other.hasCreationTime()); 11755 if (hasCreationTime()) { 11756 result = result && (getCreationTime() 11757 == other.getCreationTime()); 11758 } 11759 result = result && (hasType() == other.hasType()); 11760 if (hasType()) { 11761 result = result && 11762 (getType() == other.getType()); 11763 } 11764 result = result && (hasVersion() == other.hasVersion()); 11765 if (hasVersion()) { 11766 result = result && (getVersion() 11767 == other.getVersion()); 11768 } 11769 result = result && (hasOwner() == other.hasOwner()); 11770 if (hasOwner()) { 11771 result = result && getOwner() 11772 .equals(other.getOwner()); 11773 } 11774 result = result && 11775 getUnknownFields().equals(other.getUnknownFields()); 11776 return result; 11777 } 11778 11779 private int memoizedHashCode = 0; 11780 @java.lang.Override hashCode()11781 public int hashCode() { 11782 if (memoizedHashCode != 0) { 11783 return memoizedHashCode; 11784 } 11785 int hash = 41; 11786 hash = (19 * hash) + getDescriptorForType().hashCode(); 11787 if (hasName()) { 11788 hash = (37 * hash) + NAME_FIELD_NUMBER; 11789 hash = (53 * hash) + getName().hashCode(); 11790 } 11791 if (hasTable()) { 11792 hash = (37 * hash) + TABLE_FIELD_NUMBER; 11793 hash = (53 * hash) + getTable().hashCode(); 11794 } 11795 if (hasCreationTime()) { 11796 hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; 11797 hash = (53 * hash) + hashLong(getCreationTime()); 11798 } 11799 if (hasType()) { 11800 hash = (37 * hash) + TYPE_FIELD_NUMBER; 11801 hash = (53 * hash) + hashEnum(getType()); 11802 } 11803 if (hasVersion()) { 11804 hash = (37 * hash) + VERSION_FIELD_NUMBER; 11805 hash = (53 * hash) + getVersion(); 11806 } 11807 if (hasOwner()) { 11808 hash = (37 * hash) + OWNER_FIELD_NUMBER; 11809 hash = (53 * hash) + getOwner().hashCode(); 11810 } 11811 hash = (29 * hash) + getUnknownFields().hashCode(); 11812 memoizedHashCode = hash; 11813 return hash; 11814 } 11815 parseFrom( com.google.protobuf.ByteString data)11816 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( 11817 com.google.protobuf.ByteString data) 11818 throws com.google.protobuf.InvalidProtocolBufferException { 11819 return PARSER.parseFrom(data); 11820 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11821 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( 11822 com.google.protobuf.ByteString data, 11823 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11824 throws com.google.protobuf.InvalidProtocolBufferException { 11825 return PARSER.parseFrom(data, extensionRegistry); 11826 } parseFrom(byte[] data)11827 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(byte[] data) 11828 throws com.google.protobuf.InvalidProtocolBufferException { 11829 return PARSER.parseFrom(data); 11830 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11831 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( 11832 byte[] data, 11833 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11834 throws com.google.protobuf.InvalidProtocolBufferException { 11835 return PARSER.parseFrom(data, extensionRegistry); 11836 } parseFrom(java.io.InputStream input)11837 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom(java.io.InputStream input) 11838 throws java.io.IOException { 11839 return PARSER.parseFrom(input); 11840 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11841 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( 11842 java.io.InputStream input, 11843 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11844 throws java.io.IOException { 11845 return PARSER.parseFrom(input, extensionRegistry); 11846 } parseDelimitedFrom(java.io.InputStream input)11847 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom(java.io.InputStream input) 11848 throws java.io.IOException { 11849 return PARSER.parseDelimitedFrom(input); 11850 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11851 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseDelimitedFrom( 11852 java.io.InputStream input, 11853 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11854 throws java.io.IOException { 11855 return PARSER.parseDelimitedFrom(input, extensionRegistry); 11856 } parseFrom( com.google.protobuf.CodedInputStream input)11857 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( 11858 com.google.protobuf.CodedInputStream input) 11859 throws java.io.IOException { 11860 return PARSER.parseFrom(input); 11861 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)11862 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parseFrom( 11863 com.google.protobuf.CodedInputStream input, 11864 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 11865 throws java.io.IOException { 11866 return PARSER.parseFrom(input, extensionRegistry); 11867 } 11868 newBuilder()11869 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()11870 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype)11871 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription prototype) { 11872 return newBuilder().mergeFrom(prototype); 11873 } toBuilder()11874 public Builder toBuilder() { return newBuilder(this); } 11875 11876 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)11877 protected Builder newBuilderForType( 11878 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 11879 Builder builder = new Builder(parent); 11880 return builder; 11881 } 11882 /** 11883 * Protobuf type {@code SnapshotDescription} 11884 * 11885 * <pre> 11886 ** 11887 * Description of the snapshot to take 11888 * </pre> 11889 */ 11890 public static final class Builder extends 11891 com.google.protobuf.GeneratedMessage.Builder<Builder> 11892 implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptionOrBuilder { 11893 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()11894 getDescriptor() { 11895 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor; 11896 } 11897 11898 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()11899 internalGetFieldAccessorTable() { 11900 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_fieldAccessorTable 11901 .ensureFieldAccessorsInitialized( 11902 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Builder.class); 11903 } 11904 11905 // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.newBuilder() Builder()11906 private Builder() { 11907 maybeForceBuilderInitialization(); 11908 } 11909 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)11910 private Builder( 11911 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 11912 super(parent); 11913 maybeForceBuilderInitialization(); 11914 } maybeForceBuilderInitialization()11915 private void maybeForceBuilderInitialization() { 11916 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 11917 } 11918 } create()11919 private static Builder create() { 11920 return new Builder(); 11921 } 11922 clear()11923 public Builder clear() { 11924 super.clear(); 11925 name_ = ""; 11926 bitField0_ = (bitField0_ & ~0x00000001); 11927 table_ = ""; 11928 bitField0_ = (bitField0_ & ~0x00000002); 11929 creationTime_ = 0L; 11930 bitField0_ = (bitField0_ & ~0x00000004); 11931 type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; 11932 bitField0_ = (bitField0_ & ~0x00000008); 11933 version_ = 0; 11934 bitField0_ = (bitField0_ & ~0x00000010); 11935 owner_ = ""; 11936 bitField0_ = (bitField0_ & ~0x00000020); 11937 return this; 11938 } 11939 clone()11940 public Builder clone() { 11941 return create().mergeFrom(buildPartial()); 11942 } 11943 11944 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()11945 getDescriptorForType() { 11946 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_SnapshotDescription_descriptor; 11947 } 11948 getDefaultInstanceForType()11949 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription getDefaultInstanceForType() { 11950 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance(); 11951 } 11952 build()11953 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription build() { 11954 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = buildPartial(); 11955 if (!result.isInitialized()) { 11956 throw newUninitializedMessageException(result); 11957 } 11958 return result; 11959 } 11960 buildPartial()11961 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription buildPartial() { 11962 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription(this); 11963 int from_bitField0_ = bitField0_; 11964 int to_bitField0_ = 0; 11965 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 11966 to_bitField0_ |= 0x00000001; 11967 } 11968 result.name_ = name_; 11969 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 11970 to_bitField0_ |= 0x00000002; 11971 } 11972 result.table_ = table_; 11973 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 11974 to_bitField0_ |= 0x00000004; 11975 } 11976 result.creationTime_ = creationTime_; 11977 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 11978 to_bitField0_ |= 0x00000008; 11979 } 11980 result.type_ = type_; 11981 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 11982 to_bitField0_ |= 0x00000010; 11983 } 11984 result.version_ = version_; 11985 if (((from_bitField0_ & 0x00000020) == 0x00000020)) { 11986 to_bitField0_ |= 0x00000020; 11987 } 11988 result.owner_ = owner_; 11989 result.bitField0_ = to_bitField0_; 11990 onBuilt(); 11991 return result; 11992 } 11993 mergeFrom(com.google.protobuf.Message other)11994 public Builder mergeFrom(com.google.protobuf.Message other) { 11995 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) { 11996 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription)other); 11997 } else { 11998 super.mergeFrom(other); 11999 return this; 12000 } 12001 } 12002 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other)12003 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription other) { 12004 if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.getDefaultInstance()) return this; 12005 if (other.hasName()) { 12006 bitField0_ |= 0x00000001; 12007 name_ = other.name_; 12008 onChanged(); 12009 } 12010 if (other.hasTable()) { 12011 bitField0_ |= 0x00000002; 12012 table_ = other.table_; 12013 onChanged(); 12014 } 12015 if (other.hasCreationTime()) { 12016 setCreationTime(other.getCreationTime()); 12017 } 12018 if (other.hasType()) { 12019 setType(other.getType()); 12020 } 12021 if (other.hasVersion()) { 12022 setVersion(other.getVersion()); 12023 } 12024 if (other.hasOwner()) { 12025 bitField0_ |= 0x00000020; 12026 owner_ = other.owner_; 12027 onChanged(); 12028 } 12029 this.mergeUnknownFields(other.getUnknownFields()); 12030 return this; 12031 } 12032 isInitialized()12033 public final boolean isInitialized() { 12034 if (!hasName()) { 12035 12036 return false; 12037 } 12038 return true; 12039 } 12040 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12041 public Builder mergeFrom( 12042 com.google.protobuf.CodedInputStream input, 12043 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12044 throws java.io.IOException { 12045 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription parsedMessage = null; 12046 try { 12047 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 12048 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 12049 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription) e.getUnfinishedMessage(); 12050 throw e; 12051 } finally { 12052 if (parsedMessage != null) { 12053 mergeFrom(parsedMessage); 12054 } 12055 } 12056 return this; 12057 } 12058 private int bitField0_; 12059 12060 // required string name = 1; 12061 private java.lang.Object name_ = ""; 12062 /** 12063 * <code>required string name = 1;</code> 12064 */ hasName()12065 public boolean hasName() { 12066 return ((bitField0_ & 0x00000001) == 0x00000001); 12067 } 12068 /** 12069 * <code>required string name = 1;</code> 12070 */ getName()12071 public java.lang.String getName() { 12072 java.lang.Object ref = name_; 12073 if (!(ref instanceof java.lang.String)) { 12074 java.lang.String s = ((com.google.protobuf.ByteString) ref) 12075 .toStringUtf8(); 12076 name_ = s; 12077 return s; 12078 } else { 12079 return (java.lang.String) ref; 12080 } 12081 } 12082 /** 12083 * <code>required string name = 1;</code> 12084 */ 12085 public com.google.protobuf.ByteString getNameBytes()12086 getNameBytes() { 12087 java.lang.Object ref = name_; 12088 if (ref instanceof String) { 12089 com.google.protobuf.ByteString b = 12090 com.google.protobuf.ByteString.copyFromUtf8( 12091 (java.lang.String) ref); 12092 name_ = b; 12093 return b; 12094 } else { 12095 return (com.google.protobuf.ByteString) ref; 12096 } 12097 } 12098 /** 12099 * <code>required string name = 1;</code> 12100 */ setName( java.lang.String value)12101 public Builder setName( 12102 java.lang.String value) { 12103 if (value == null) { 12104 throw new NullPointerException(); 12105 } 12106 bitField0_ |= 0x00000001; 12107 name_ = value; 12108 onChanged(); 12109 return this; 12110 } 12111 /** 12112 * <code>required string name = 1;</code> 12113 */ clearName()12114 public Builder clearName() { 12115 bitField0_ = (bitField0_ & ~0x00000001); 12116 name_ = getDefaultInstance().getName(); 12117 onChanged(); 12118 return this; 12119 } 12120 /** 12121 * <code>required string name = 1;</code> 12122 */ setNameBytes( com.google.protobuf.ByteString value)12123 public Builder setNameBytes( 12124 com.google.protobuf.ByteString value) { 12125 if (value == null) { 12126 throw new NullPointerException(); 12127 } 12128 bitField0_ |= 0x00000001; 12129 name_ = value; 12130 onChanged(); 12131 return this; 12132 } 12133 12134 // optional string table = 2; 12135 private java.lang.Object table_ = ""; 12136 /** 12137 * <code>optional string table = 2;</code> 12138 * 12139 * <pre> 12140 * not needed for delete, but checked for in taking snapshot 12141 * </pre> 12142 */ hasTable()12143 public boolean hasTable() { 12144 return ((bitField0_ & 0x00000002) == 0x00000002); 12145 } 12146 /** 12147 * <code>optional string table = 2;</code> 12148 * 12149 * <pre> 12150 * not needed for delete, but checked for in taking snapshot 12151 * </pre> 12152 */ getTable()12153 public java.lang.String getTable() { 12154 java.lang.Object ref = table_; 12155 if (!(ref instanceof java.lang.String)) { 12156 java.lang.String s = ((com.google.protobuf.ByteString) ref) 12157 .toStringUtf8(); 12158 table_ = s; 12159 return s; 12160 } else { 12161 return (java.lang.String) ref; 12162 } 12163 } 12164 /** 12165 * <code>optional string table = 2;</code> 12166 * 12167 * <pre> 12168 * not needed for delete, but checked for in taking snapshot 12169 * </pre> 12170 */ 12171 public com.google.protobuf.ByteString getTableBytes()12172 getTableBytes() { 12173 java.lang.Object ref = table_; 12174 if (ref instanceof String) { 12175 com.google.protobuf.ByteString b = 12176 com.google.protobuf.ByteString.copyFromUtf8( 12177 (java.lang.String) ref); 12178 table_ = b; 12179 return b; 12180 } else { 12181 return (com.google.protobuf.ByteString) ref; 12182 } 12183 } 12184 /** 12185 * <code>optional string table = 2;</code> 12186 * 12187 * <pre> 12188 * not needed for delete, but checked for in taking snapshot 12189 * </pre> 12190 */ setTable( java.lang.String value)12191 public Builder setTable( 12192 java.lang.String value) { 12193 if (value == null) { 12194 throw new NullPointerException(); 12195 } 12196 bitField0_ |= 0x00000002; 12197 table_ = value; 12198 onChanged(); 12199 return this; 12200 } 12201 /** 12202 * <code>optional string table = 2;</code> 12203 * 12204 * <pre> 12205 * not needed for delete, but checked for in taking snapshot 12206 * </pre> 12207 */ clearTable()12208 public Builder clearTable() { 12209 bitField0_ = (bitField0_ & ~0x00000002); 12210 table_ = getDefaultInstance().getTable(); 12211 onChanged(); 12212 return this; 12213 } 12214 /** 12215 * <code>optional string table = 2;</code> 12216 * 12217 * <pre> 12218 * not needed for delete, but checked for in taking snapshot 12219 * </pre> 12220 */ setTableBytes( com.google.protobuf.ByteString value)12221 public Builder setTableBytes( 12222 com.google.protobuf.ByteString value) { 12223 if (value == null) { 12224 throw new NullPointerException(); 12225 } 12226 bitField0_ |= 0x00000002; 12227 table_ = value; 12228 onChanged(); 12229 return this; 12230 } 12231 12232 // optional int64 creation_time = 3 [default = 0]; 12233 private long creationTime_ ; 12234 /** 12235 * <code>optional int64 creation_time = 3 [default = 0];</code> 12236 */ hasCreationTime()12237 public boolean hasCreationTime() { 12238 return ((bitField0_ & 0x00000004) == 0x00000004); 12239 } 12240 /** 12241 * <code>optional int64 creation_time = 3 [default = 0];</code> 12242 */ getCreationTime()12243 public long getCreationTime() { 12244 return creationTime_; 12245 } 12246 /** 12247 * <code>optional int64 creation_time = 3 [default = 0];</code> 12248 */ setCreationTime(long value)12249 public Builder setCreationTime(long value) { 12250 bitField0_ |= 0x00000004; 12251 creationTime_ = value; 12252 onChanged(); 12253 return this; 12254 } 12255 /** 12256 * <code>optional int64 creation_time = 3 [default = 0];</code> 12257 */ clearCreationTime()12258 public Builder clearCreationTime() { 12259 bitField0_ = (bitField0_ & ~0x00000004); 12260 creationTime_ = 0L; 12261 onChanged(); 12262 return this; 12263 } 12264 12265 // optional .SnapshotDescription.Type type = 4 [default = FLUSH]; 12266 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; 12267 /** 12268 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 12269 */ hasType()12270 public boolean hasType() { 12271 return ((bitField0_ & 0x00000008) == 0x00000008); 12272 } 12273 /** 12274 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 12275 */ getType()12276 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type getType() { 12277 return type_; 12278 } 12279 /** 12280 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 12281 */ setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value)12282 public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type value) { 12283 if (value == null) { 12284 throw new NullPointerException(); 12285 } 12286 bitField0_ |= 0x00000008; 12287 type_ = value; 12288 onChanged(); 12289 return this; 12290 } 12291 /** 12292 * <code>optional .SnapshotDescription.Type type = 4 [default = FLUSH];</code> 12293 */ clearType()12294 public Builder clearType() { 12295 bitField0_ = (bitField0_ & ~0x00000008); 12296 type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription.Type.FLUSH; 12297 onChanged(); 12298 return this; 12299 } 12300 12301 // optional int32 version = 5; 12302 private int version_ ; 12303 /** 12304 * <code>optional int32 version = 5;</code> 12305 */ hasVersion()12306 public boolean hasVersion() { 12307 return ((bitField0_ & 0x00000010) == 0x00000010); 12308 } 12309 /** 12310 * <code>optional int32 version = 5;</code> 12311 */ getVersion()12312 public int getVersion() { 12313 return version_; 12314 } 12315 /** 12316 * <code>optional int32 version = 5;</code> 12317 */ setVersion(int value)12318 public Builder setVersion(int value) { 12319 bitField0_ |= 0x00000010; 12320 version_ = value; 12321 onChanged(); 12322 return this; 12323 } 12324 /** 12325 * <code>optional int32 version = 5;</code> 12326 */ clearVersion()12327 public Builder clearVersion() { 12328 bitField0_ = (bitField0_ & ~0x00000010); 12329 version_ = 0; 12330 onChanged(); 12331 return this; 12332 } 12333 12334 // optional string owner = 6; 12335 private java.lang.Object owner_ = ""; 12336 /** 12337 * <code>optional string owner = 6;</code> 12338 */ hasOwner()12339 public boolean hasOwner() { 12340 return ((bitField0_ & 0x00000020) == 0x00000020); 12341 } 12342 /** 12343 * <code>optional string owner = 6;</code> 12344 */ getOwner()12345 public java.lang.String getOwner() { 12346 java.lang.Object ref = owner_; 12347 if (!(ref instanceof java.lang.String)) { 12348 java.lang.String s = ((com.google.protobuf.ByteString) ref) 12349 .toStringUtf8(); 12350 owner_ = s; 12351 return s; 12352 } else { 12353 return (java.lang.String) ref; 12354 } 12355 } 12356 /** 12357 * <code>optional string owner = 6;</code> 12358 */ 12359 public com.google.protobuf.ByteString getOwnerBytes()12360 getOwnerBytes() { 12361 java.lang.Object ref = owner_; 12362 if (ref instanceof String) { 12363 com.google.protobuf.ByteString b = 12364 com.google.protobuf.ByteString.copyFromUtf8( 12365 (java.lang.String) ref); 12366 owner_ = b; 12367 return b; 12368 } else { 12369 return (com.google.protobuf.ByteString) ref; 12370 } 12371 } 12372 /** 12373 * <code>optional string owner = 6;</code> 12374 */ setOwner( java.lang.String value)12375 public Builder setOwner( 12376 java.lang.String value) { 12377 if (value == null) { 12378 throw new NullPointerException(); 12379 } 12380 bitField0_ |= 0x00000020; 12381 owner_ = value; 12382 onChanged(); 12383 return this; 12384 } 12385 /** 12386 * <code>optional string owner = 6;</code> 12387 */ clearOwner()12388 public Builder clearOwner() { 12389 bitField0_ = (bitField0_ & ~0x00000020); 12390 owner_ = getDefaultInstance().getOwner(); 12391 onChanged(); 12392 return this; 12393 } 12394 /** 12395 * <code>optional string owner = 6;</code> 12396 */ setOwnerBytes( com.google.protobuf.ByteString value)12397 public Builder setOwnerBytes( 12398 com.google.protobuf.ByteString value) { 12399 if (value == null) { 12400 throw new NullPointerException(); 12401 } 12402 bitField0_ |= 0x00000020; 12403 owner_ = value; 12404 onChanged(); 12405 return this; 12406 } 12407 12408 // @@protoc_insertion_point(builder_scope:SnapshotDescription) 12409 } 12410 12411 static { 12412 defaultInstance = new SnapshotDescription(true); defaultInstance.initFields()12413 defaultInstance.initFields(); 12414 } 12415 12416 // @@protoc_insertion_point(class_scope:SnapshotDescription) 12417 } 12418 12419 public interface ProcedureDescriptionOrBuilder 12420 extends com.google.protobuf.MessageOrBuilder { 12421 12422 // required string signature = 1; 12423 /** 12424 * <code>required string signature = 1;</code> 12425 * 12426 * <pre> 12427 * the unique signature of the procedure 12428 * </pre> 12429 */ hasSignature()12430 boolean hasSignature(); 12431 /** 12432 * <code>required string signature = 1;</code> 12433 * 12434 * <pre> 12435 * the unique signature of the procedure 12436 * </pre> 12437 */ getSignature()12438 java.lang.String getSignature(); 12439 /** 12440 * <code>required string signature = 1;</code> 12441 * 12442 * <pre> 12443 * the unique signature of the procedure 12444 * </pre> 12445 */ 12446 com.google.protobuf.ByteString getSignatureBytes()12447 getSignatureBytes(); 12448 12449 // optional string instance = 2; 12450 /** 12451 * <code>optional string instance = 2;</code> 12452 * 12453 * <pre> 12454 * the procedure instance name 12455 * </pre> 12456 */ hasInstance()12457 boolean hasInstance(); 12458 /** 12459 * <code>optional string instance = 2;</code> 12460 * 12461 * <pre> 12462 * the procedure instance name 12463 * </pre> 12464 */ getInstance()12465 java.lang.String getInstance(); 12466 /** 12467 * <code>optional string instance = 2;</code> 12468 * 12469 * <pre> 12470 * the procedure instance name 12471 * </pre> 12472 */ 12473 com.google.protobuf.ByteString getInstanceBytes()12474 getInstanceBytes(); 12475 12476 // optional int64 creation_time = 3 [default = 0]; 12477 /** 12478 * <code>optional int64 creation_time = 3 [default = 0];</code> 12479 */ hasCreationTime()12480 boolean hasCreationTime(); 12481 /** 12482 * <code>optional int64 creation_time = 3 [default = 0];</code> 12483 */ getCreationTime()12484 long getCreationTime(); 12485 12486 // repeated .NameStringPair configuration = 4; 12487 /** 12488 * <code>repeated .NameStringPair configuration = 4;</code> 12489 */ 12490 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList()12491 getConfigurationList(); 12492 /** 12493 * <code>repeated .NameStringPair configuration = 4;</code> 12494 */ getConfiguration(int index)12495 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index); 12496 /** 12497 * <code>repeated .NameStringPair configuration = 4;</code> 12498 */ getConfigurationCount()12499 int getConfigurationCount(); 12500 /** 12501 * <code>repeated .NameStringPair configuration = 4;</code> 12502 */ 12503 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()12504 getConfigurationOrBuilderList(); 12505 /** 12506 * <code>repeated .NameStringPair configuration = 4;</code> 12507 */ getConfigurationOrBuilder( int index)12508 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 12509 int index); 12510 } 12511 /** 12512 * Protobuf type {@code ProcedureDescription} 12513 * 12514 * <pre> 12515 ** 12516 * Description of the distributed procedure to take 12517 * </pre> 12518 */ 12519 public static final class ProcedureDescription extends 12520 com.google.protobuf.GeneratedMessage 12521 implements ProcedureDescriptionOrBuilder { 12522 // Use ProcedureDescription.newBuilder() to construct. ProcedureDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder)12523 private ProcedureDescription(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 12524 super(builder); 12525 this.unknownFields = builder.getUnknownFields(); 12526 } ProcedureDescription(boolean noInit)12527 private ProcedureDescription(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 12528 12529 private static final ProcedureDescription defaultInstance; getDefaultInstance()12530 public static ProcedureDescription getDefaultInstance() { 12531 return defaultInstance; 12532 } 12533 getDefaultInstanceForType()12534 public ProcedureDescription getDefaultInstanceForType() { 12535 return defaultInstance; 12536 } 12537 12538 private final com.google.protobuf.UnknownFieldSet unknownFields; 12539 @java.lang.Override 12540 public final com.google.protobuf.UnknownFieldSet getUnknownFields()12541 getUnknownFields() { 12542 return this.unknownFields; 12543 } ProcedureDescription( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12544 private ProcedureDescription( 12545 com.google.protobuf.CodedInputStream input, 12546 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12547 throws com.google.protobuf.InvalidProtocolBufferException { 12548 initFields(); 12549 int mutable_bitField0_ = 0; 12550 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 12551 com.google.protobuf.UnknownFieldSet.newBuilder(); 12552 try { 12553 boolean done = false; 12554 while (!done) { 12555 int tag = input.readTag(); 12556 switch (tag) { 12557 case 0: 12558 done = true; 12559 break; 12560 default: { 12561 if (!parseUnknownField(input, unknownFields, 12562 extensionRegistry, tag)) { 12563 done = true; 12564 } 12565 break; 12566 } 12567 case 10: { 12568 bitField0_ |= 0x00000001; 12569 signature_ = input.readBytes(); 12570 break; 12571 } 12572 case 18: { 12573 bitField0_ |= 0x00000002; 12574 instance_ = input.readBytes(); 12575 break; 12576 } 12577 case 24: { 12578 bitField0_ |= 0x00000004; 12579 creationTime_ = input.readInt64(); 12580 break; 12581 } 12582 case 34: { 12583 if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { 12584 configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(); 12585 mutable_bitField0_ |= 0x00000008; 12586 } 12587 configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry)); 12588 break; 12589 } 12590 } 12591 } 12592 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 12593 throw e.setUnfinishedMessage(this); 12594 } catch (java.io.IOException e) { 12595 throw new com.google.protobuf.InvalidProtocolBufferException( 12596 e.getMessage()).setUnfinishedMessage(this); 12597 } finally { 12598 if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { 12599 configuration_ = java.util.Collections.unmodifiableList(configuration_); 12600 } 12601 this.unknownFields = unknownFields.build(); 12602 makeExtensionsImmutable(); 12603 } 12604 } 12605 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()12606 getDescriptor() { 12607 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ProcedureDescription_descriptor; 12608 } 12609 12610 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()12611 internalGetFieldAccessorTable() { 12612 return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ProcedureDescription_fieldAccessorTable 12613 .ensureFieldAccessorsInitialized( 12614 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription.Builder.class); 12615 } 12616 12617 public static com.google.protobuf.Parser<ProcedureDescription> PARSER = 12618 new com.google.protobuf.AbstractParser<ProcedureDescription>() { 12619 public ProcedureDescription parsePartialFrom( 12620 com.google.protobuf.CodedInputStream input, 12621 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12622 throws com.google.protobuf.InvalidProtocolBufferException { 12623 return new ProcedureDescription(input, extensionRegistry); 12624 } 12625 }; 12626 12627 @java.lang.Override getParserForType()12628 public com.google.protobuf.Parser<ProcedureDescription> getParserForType() { 12629 return PARSER; 12630 } 12631 12632 private int bitField0_; 12633 // required string signature = 1; 12634 public static final int SIGNATURE_FIELD_NUMBER = 1; 12635 private java.lang.Object signature_; 12636 /** 12637 * <code>required string signature = 1;</code> 12638 * 12639 * <pre> 12640 * the unique signature of the procedure 12641 * </pre> 12642 */ hasSignature()12643 public boolean hasSignature() { 12644 return ((bitField0_ & 0x00000001) == 0x00000001); 12645 } 12646 /** 12647 * <code>required string signature = 1;</code> 12648 * 12649 * <pre> 12650 * the unique signature of the procedure 12651 * </pre> 12652 */ getSignature()12653 public java.lang.String getSignature() { 12654 java.lang.Object ref = signature_; 12655 if (ref instanceof java.lang.String) { 12656 return (java.lang.String) ref; 12657 } else { 12658 com.google.protobuf.ByteString bs = 12659 (com.google.protobuf.ByteString) ref; 12660 java.lang.String s = bs.toStringUtf8(); 12661 if (bs.isValidUtf8()) { 12662 signature_ = s; 12663 } 12664 return s; 12665 } 12666 } 12667 /** 12668 * <code>required string signature = 1;</code> 12669 * 12670 * <pre> 12671 * the unique signature of the procedure 12672 * </pre> 12673 */ 12674 public com.google.protobuf.ByteString getSignatureBytes()12675 getSignatureBytes() { 12676 java.lang.Object ref = signature_; 12677 if (ref instanceof java.lang.String) { 12678 com.google.protobuf.ByteString b = 12679 com.google.protobuf.ByteString.copyFromUtf8( 12680 (java.lang.String) ref); 12681 signature_ = b; 12682 return b; 12683 } else { 12684 return (com.google.protobuf.ByteString) ref; 12685 } 12686 } 12687 12688 // optional string instance = 2; 12689 public static final int INSTANCE_FIELD_NUMBER = 2; 12690 private java.lang.Object instance_; 12691 /** 12692 * <code>optional string instance = 2;</code> 12693 * 12694 * <pre> 12695 * the procedure instance name 12696 * </pre> 12697 */ hasInstance()12698 public boolean hasInstance() { 12699 return ((bitField0_ & 0x00000002) == 0x00000002); 12700 } 12701 /** 12702 * <code>optional string instance = 2;</code> 12703 * 12704 * <pre> 12705 * the procedure instance name 12706 * </pre> 12707 */ getInstance()12708 public java.lang.String getInstance() { 12709 java.lang.Object ref = instance_; 12710 if (ref instanceof java.lang.String) { 12711 return (java.lang.String) ref; 12712 } else { 12713 com.google.protobuf.ByteString bs = 12714 (com.google.protobuf.ByteString) ref; 12715 java.lang.String s = bs.toStringUtf8(); 12716 if (bs.isValidUtf8()) { 12717 instance_ = s; 12718 } 12719 return s; 12720 } 12721 } 12722 /** 12723 * <code>optional string instance = 2;</code> 12724 * 12725 * <pre> 12726 * the procedure instance name 12727 * </pre> 12728 */ 12729 public com.google.protobuf.ByteString getInstanceBytes()12730 getInstanceBytes() { 12731 java.lang.Object ref = instance_; 12732 if (ref instanceof java.lang.String) { 12733 com.google.protobuf.ByteString b = 12734 com.google.protobuf.ByteString.copyFromUtf8( 12735 (java.lang.String) ref); 12736 instance_ = b; 12737 return b; 12738 } else { 12739 return (com.google.protobuf.ByteString) ref; 12740 } 12741 } 12742 12743 // optional int64 creation_time = 3 [default = 0]; 12744 public static final int CREATION_TIME_FIELD_NUMBER = 3; 12745 private long creationTime_; 12746 /** 12747 * <code>optional int64 creation_time = 3 [default = 0];</code> 12748 */ hasCreationTime()12749 public boolean hasCreationTime() { 12750 return ((bitField0_ & 0x00000004) == 0x00000004); 12751 } 12752 /** 12753 * <code>optional int64 creation_time = 3 [default = 0];</code> 12754 */ getCreationTime()12755 public long getCreationTime() { 12756 return creationTime_; 12757 } 12758 12759 // repeated .NameStringPair configuration = 4; 12760 public static final int CONFIGURATION_FIELD_NUMBER = 4; 12761 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_; 12762 /** 12763 * <code>repeated .NameStringPair configuration = 4;</code> 12764 */ getConfigurationList()12765 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() { 12766 return configuration_; 12767 } 12768 /** 12769 * <code>repeated .NameStringPair configuration = 4;</code> 12770 */ 12771 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> getConfigurationOrBuilderList()12772 getConfigurationOrBuilderList() { 12773 return configuration_; 12774 } 12775 /** 12776 * <code>repeated .NameStringPair configuration = 4;</code> 12777 */ getConfigurationCount()12778 public int getConfigurationCount() { 12779 return configuration_.size(); 12780 } 12781 /** 12782 * <code>repeated .NameStringPair configuration = 4;</code> 12783 */ getConfiguration(int index)12784 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) { 12785 return configuration_.get(index); 12786 } 12787 /** 12788 * <code>repeated .NameStringPair configuration = 4;</code> 12789 */ getConfigurationOrBuilder( int index)12790 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder( 12791 int index) { 12792 return configuration_.get(index); 12793 } 12794 initFields()12795 private void initFields() { 12796 signature_ = ""; 12797 instance_ = ""; 12798 creationTime_ = 0L; 12799 configuration_ = java.util.Collections.emptyList(); 12800 } 12801 private byte memoizedIsInitialized = -1; isInitialized()12802 public final boolean isInitialized() { 12803 byte isInitialized = memoizedIsInitialized; 12804 if (isInitialized != -1) return isInitialized == 1; 12805 12806 if (!hasSignature()) { 12807 memoizedIsInitialized = 0; 12808 return false; 12809 } 12810 for (int i = 0; i < getConfigurationCount(); i++) { 12811 if (!getConfiguration(i).isInitialized()) { 12812 memoizedIsInitialized = 0; 12813 return false; 12814 } 12815 } 12816 memoizedIsInitialized = 1; 12817 return true; 12818 } 12819 writeTo(com.google.protobuf.CodedOutputStream output)12820 public void writeTo(com.google.protobuf.CodedOutputStream output) 12821 throws java.io.IOException { 12822 getSerializedSize(); 12823 if (((bitField0_ & 0x00000001) == 0x00000001)) { 12824 output.writeBytes(1, getSignatureBytes()); 12825 } 12826 if (((bitField0_ & 0x00000002) == 0x00000002)) { 12827 output.writeBytes(2, getInstanceBytes()); 12828 } 12829 if (((bitField0_ & 0x00000004) == 0x00000004)) { 12830 output.writeInt64(3, creationTime_); 12831 } 12832 for (int i = 0; i < configuration_.size(); i++) { 12833 output.writeMessage(4, configuration_.get(i)); 12834 } 12835 getUnknownFields().writeTo(output); 12836 } 12837 12838 private int memoizedSerializedSize = -1; getSerializedSize()12839 public int getSerializedSize() { 12840 int size = memoizedSerializedSize; 12841 if (size != -1) return size; 12842 12843 size = 0; 12844 if (((bitField0_ & 0x00000001) == 0x00000001)) { 12845 size += com.google.protobuf.CodedOutputStream 12846 .computeBytesSize(1, getSignatureBytes()); 12847 } 12848 if (((bitField0_ & 0x00000002) == 0x00000002)) { 12849 size += com.google.protobuf.CodedOutputStream 12850 .computeBytesSize(2, getInstanceBytes()); 12851 } 12852 if (((bitField0_ & 0x00000004) == 0x00000004)) { 12853 size += com.google.protobuf.CodedOutputStream 12854 .computeInt64Size(3, creationTime_); 12855 } 12856 for (int i = 0; i < configuration_.size(); i++) { 12857 size += com.google.protobuf.CodedOutputStream 12858 .computeMessageSize(4, configuration_.get(i)); 12859 } 12860 size += getUnknownFields().getSerializedSize(); 12861 memoizedSerializedSize = size; 12862 return size; 12863 } 12864 12865 private static final long serialVersionUID = 0L; 12866 @java.lang.Override writeReplace()12867 protected java.lang.Object writeReplace() 12868 throws java.io.ObjectStreamException { 12869 return super.writeReplace(); 12870 } 12871 12872 @java.lang.Override equals(final java.lang.Object obj)12873 public boolean equals(final java.lang.Object obj) { 12874 if (obj == this) { 12875 return true; 12876 } 12877 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription)) { 12878 return super.equals(obj); 12879 } 12880 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription) obj; 12881 12882 boolean result = true; 12883 result = result && (hasSignature() == other.hasSignature()); 12884 if (hasSignature()) { 12885 result = result && getSignature() 12886 .equals(other.getSignature()); 12887 } 12888 result = result && (hasInstance() == other.hasInstance()); 12889 if (hasInstance()) { 12890 result = result && getInstance() 12891 .equals(other.getInstance()); 12892 } 12893 result = result && (hasCreationTime() == other.hasCreationTime()); 12894 if (hasCreationTime()) { 12895 result = result && (getCreationTime() 12896 == other.getCreationTime()); 12897 } 12898 result = result && getConfigurationList() 12899 .equals(other.getConfigurationList()); 12900 result = result && 12901 getUnknownFields().equals(other.getUnknownFields()); 12902 return result; 12903 } 12904 12905 private int memoizedHashCode = 0; 12906 @java.lang.Override hashCode()12907 public int hashCode() { 12908 if (memoizedHashCode != 0) { 12909 return memoizedHashCode; 12910 } 12911 int hash = 41; 12912 hash = (19 * hash) + getDescriptorForType().hashCode(); 12913 if (hasSignature()) { 12914 hash = (37 * hash) + SIGNATURE_FIELD_NUMBER; 12915 hash = (53 * hash) + getSignature().hashCode(); 12916 } 12917 if (hasInstance()) { 12918 hash = (37 * hash) + INSTANCE_FIELD_NUMBER; 12919 hash = (53 * hash) + getInstance().hashCode(); 12920 } 12921 if (hasCreationTime()) { 12922 hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER; 12923 hash = (53 * hash) + hashLong(getCreationTime()); 12924 } 12925 if (getConfigurationCount() > 0) { 12926 hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER; 12927 hash = (53 * hash) + getConfigurationList().hashCode(); 12928 } 12929 hash = (29 * hash) + getUnknownFields().hashCode(); 12930 memoizedHashCode = hash; 12931 return hash; 12932 } 12933 parseFrom( com.google.protobuf.ByteString data)12934 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( 12935 com.google.protobuf.ByteString data) 12936 throws com.google.protobuf.InvalidProtocolBufferException { 12937 return PARSER.parseFrom(data); 12938 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12939 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( 12940 com.google.protobuf.ByteString data, 12941 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12942 throws com.google.protobuf.InvalidProtocolBufferException { 12943 return PARSER.parseFrom(data, extensionRegistry); 12944 } parseFrom(byte[] data)12945 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(byte[] data) 12946 throws com.google.protobuf.InvalidProtocolBufferException { 12947 return PARSER.parseFrom(data); 12948 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12949 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( 12950 byte[] data, 12951 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12952 throws com.google.protobuf.InvalidProtocolBufferException { 12953 return PARSER.parseFrom(data, extensionRegistry); 12954 } parseFrom(java.io.InputStream input)12955 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom(java.io.InputStream input) 12956 throws java.io.IOException { 12957 return PARSER.parseFrom(input); 12958 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)12959 public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription parseFrom( 12960 java.io.InputStream input, 12961 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 12962 throws java.io.IOException { 12963 return PARSER.parseFrom(input, extensionRegistry); 12964 } parseDelimitedFrom(java.io.InputStream input)12965